Sep 30 19:46:36 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 30 19:46:37 crc restorecon[4571]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 19:46:37 crc restorecon[4571]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc 
restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 19:46:37 crc 
restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 19:46:37 crc restorecon[4571]: [identical "not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16" records for the following files under /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/: 69105f4f.0, GlobalSign.1.pem, 0b9bc432.0, Certum_Trusted_Network_CA_2.pem, GTS_Root_R3.pem, 32888f65.0, CommScope_Public_Trust_ECC_Root-01.pem, 6b03dec0.0, 219d9499.0, CommScope_Public_Trust_ECC_Root-02.pem, 5acf816d.0, cbf06781.0, CommScope_Public_Trust_RSA_Root-01.pem, GTS_Root_R4.pem, dc99f41e.0, CommScope_Public_Trust_RSA_Root-02.pem, GlobalSign.3.pem, AAA_Certificate_Services.pem, 985c1f52.0, 8794b4e3.0, D-TRUST_BR_Root_CA_1_2020.pem, e7c037b4.0, ef954a4e.0, D-TRUST_EV_Root_CA_1_2020.pem, 2add47b6.0, 90c5a3c8.0, D-TRUST_Root_Class_3_CA_2_2009.pem, b0f3e76e.0, 53a1b57a.0, D-TRUST_Root_Class_3_CA_2_EV_2009.pem, GlobalSign_Root_CA.pem, DigiCert_Assured_ID_Root_CA.pem, 5ad8a5d6.0, 68dd7389.0, DigiCert_Assured_ID_Root_G2.pem, 9d04f354.0, 8d6437c3.0, 062cdee6.0, bd43e1dd.0, DigiCert_Assured_ID_Root_G3.pem, 7f3d5d1d.0, c491639e.0, GlobalSign_Root_E46.pem, DigiCert_Global_Root_CA.pem, 3513523f.0, 399e7759.0, feffd413.0, d18e9066.0, DigiCert_Global_Root_G2.pem, 607986c7.0, c90bc37d.0, 1b0f7e5c.0, 1e08bfd1.0, DigiCert_Global_Root_G3.pem, dd8e9d41.0, ed39abd0.0, a3418fda.0, bc3f2570.0, DigiCert_High_Assurance_EV_Root_CA.pem, 244b5494.0, 81b9768f.0, GlobalSign.2.pem, 4be590e0.0, DigiCert_TLS_ECC_P384_Root_G5.pem, 9846683b.0, 252252d2.0, 1e8e7201.0, ISRG_Root_X1.pem, DigiCert_TLS_RSA4096_Root_G5.pem, d52c538d.0, c44cc0c0.0, GlobalSign_Root_R46.pem, DigiCert_Trusted_Root_G4.pem, 75d1b2ed.0, a2c66da8.0, GTS_Root_R2.pem, ecccd8db.0, Entrust.net_Certification_Authority__2048_.pem, aee5f10d.0, 3e7271e8.0, b0e59380.0, 4c3982f2.0, Entrust_Root_Certification_Authority.pem, 6b99d060.0, bf64f35b.0, 0a775a30.0, 002c0b4f.0, cc450945.0, Entrust_Root_Certification_Authority_-_EC1.pem, 106f3e4d.0, b3fb433b.0, GlobalSign.pem, 4042bcee.0, Entrust_Root_Certification_Authority_-_G2.pem, 02265526.0, 455f1b52.0, 0d69c7e1.0, 9f727ac7.0, Entrust_Root_Certification_Authority_-_G4.pem, 5e98733a.0, f0cd152c.0, dc4d6a89.0, 6187b673.0, FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem, ba8887ce.0, 068570d1.0, f081611a.0, 48a195d8.0, GDCA_TrustAUTH_R5_ROOT.pem, 0f6fa695.0, ab59055e.0, b92fd57f.0, GLOBALTRUST_2020.pem, fa5da96b.0, 1ec40989.0, 7719f463.0, GTS_Root_R1.pem, 1001acf7.0, f013ecaf.0, 626dceaf.0, c559d742.0, 1d3472b9.0, 9479c8c3.0, a81e292b.0, 4bfab552.0, Go_Daddy_Class_2_Certification_Authority.pem, Sectigo_Public_Server_Authentication_Root_E46.pem, Go_Daddy_Root_Certificate_Authority_-_G2.pem, e071171e.0, 57bcb2da.0, HARICA_TLS_ECC_Root_CA_2021.pem, ab5346f4.0, 5046c355.0, HARICA_TLS_RSA_Root_CA_2021.pem, 865fbdf9.0, da0cfd1d.0, 85cde254.0, Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem, cbb3f32b.0, SecureSign_RootCA11.pem, Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem, 5860aaa6.0, 31188b5e.0, HiPKI_Root_CA_-_G1.pem, c7f1359b.0, 5f15c80c.0, Hongkong_Post_Root_CA_3.pem, 09789157.0, ISRG_Root_X2.pem, 18856ac4.0, 1e09d511.0, IdenTrust_Commercial_Root_CA_1.pem, cf701eeb.0, d06393bb.0, IdenTrust_Public_Sector_Root_CA_1.pem, 10531352.0, Izenpe.com.pem, SecureTrust_CA.pem, b0ed035a.0, Microsec_e-Szigno_Root_CA_2009.pem, 8160b96c.0, e8651083.0, 2c63f966.0, Security_Communication_RootCA2.pem, Microsoft_ECC_Root_Certificate_Authority_2017.pem, 8d89cda1.0, 01419da9.0, SSL.com_TLS_RSA_Root_CA_2022.pem, b7a5b843.0, Microsoft_RSA_Root_Certificate_Authority_2017.pem, bf53fb88.0, 9591a472.0, 3afde786.0, SwissSign_Gold_CA_-_G2.pem, NAVER_Global_Root_Certification_Authority.pem, 3fb36b73.0, d39b0a2c.0, a89d74c2.0, cd58d51e.0, b7db1890.0, NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem, 988a38cb.0, 60afe812.0, f39fc864.0, 5443e9e3.0, OISTE_WISeKey_Global_Root_GB_CA.pem, e73d606e.0, dfc0fe80.0, b66938e9.0, 1e1eab7c.0, OISTE_WISeKey_Global_Root_GC_CA.pem, 773e07ad.0, 3c899c73.0, d59297b8.0, ddcda989.0, QuoVadis_Root_CA_1_G3.pem, 749e9e03.0, 52b525c7.0, Security_Communication_RootCA3.pem, QuoVadis_Root_CA_2.pem, d7e8dc79.0, 7a819ef2.0, 08063a00.0, 6b483515.0, QuoVadis_Root_CA_2_G3.pem, 064e0aa9.0, 1f58a078.0, 6f7454b3.0, 7fa05551.0, QuoVadis_Root_CA_3.pem, 76faf6c0.0, 9339512a.0, f387163d.0, ee37c333.0, QuoVadis_Root_CA_3_G3.pem, e18bfb83.0, e442e424.0, fe8a2cd8.0, 23f4c490.0, 5cd81ad7.0, SSL.com_EV_Root_Certification_Authority_ECC.pem, f0c70a8d.0, 7892ad52.0, SZAFIR_ROOT_CA2.pem, 4f316efb.0, SSL.com_EV_Root_Certification_Authority_RSA_R2.pem, 06dc52d5.0, 583d0756.0, Sectigo_Public_Server_Authentication_Root_R46.pem, SSL.com_Root_Certification_Authority_ECC.pem, 0bf05006.0, 88950faa.0, 9046744a.0, 3c860d51.0, SSL.com_Root_Certification_Authority_RSA.pem, 6fa5da56.0, 33ee480d.0, Secure_Global_CA.pem, 63a2c897.0, SSL.com_TLS_ECC_Root_CA_2022.pem, bdacca6f.0, ff34af3f.0, dbff3a01.0, Security_Communication_ECC_RootCA1.pem, emSign_Root_CA_-_C1.pem, Starfield_Class_2_Certification_Authority.pem, 406c9bb1.0, Starfield_Root_Certificate_Authority_-_G2.pem, emSign_ECC_Root_CA_-_C3.pem, Starfield_Services_Root_Certificate_Authority_-_G2.pem, SwissSign_Silver_CA_-_G2.pem, 99e1b953.0, T-TeleSec_GlobalRoot_Class_2.pem, vTrus_Root_CA.pem, T-TeleSec_GlobalRoot_Class_3.pem, 14bc7599.0, TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem, TWCA_Global_Root_CA.pem, 7a3adc42.0, TWCA_Root_Certification_Authority.pem, f459871d.0, Telekom_Security_TLS_ECC_Root_2020.pem, emSign_Root_CA_-_G1.pem, Telekom_Security_TLS_RSA_Root_2023.pem, TeliaSonera_Root_CA_v1.pem, Telia_Root_CA_v2.pem, 8f103249.0, f058632f.0, ca-certificates.crt, TrustAsia_Global_Root_CA_G3.pem, 9bf03295.0]
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 19:46:37 crc 
restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 19:46:37 crc restorecon[4571]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 30 19:46:38 crc kubenswrapper[4603]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.503552 4603 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512030 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512072 4603 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512084 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512094 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512104 4603 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512114 4603 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512123 4603 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512132 4603 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512142 4603 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512151 4603 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512159 4603 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512214 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512223 4603 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512232 4603 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512241 4603 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512250 4603 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512258 4603 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512266 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512276 4603 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512284 4603 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512293 4603 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512313 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512321 4603 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512331 4603 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512340 4603 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512350 4603 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512372 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512380 4603 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512388 4603 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512396 4603 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512404 4603 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512411 4603 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512419 4603 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512427 4603 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512435 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512443 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512451 4603 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512459 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512467 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512475 4603 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512485 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512495 4603 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512504 4603 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512512 4603 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512520 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512530 4603 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512540 4603 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512548 4603 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512557 4603 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512565 4603 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512574 4603 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512581 4603 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512591 4603 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512599 4603 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512606 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512614 4603 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512621 4603 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512629 4603 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512636 4603 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512644 4603 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512651 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512659 4603 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512678 4603 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512688 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512698 4603 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512708 4603 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512717 4603 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512725 4603 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512734 4603 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512741 4603 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.512748 4603 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513794 4603 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513822 4603 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513848 4603 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513860 4603 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513871 4603 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513881 4603 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513893 4603 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513904 4603 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513914 4603 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513923 4603 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513933 4603 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513943 4603 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513952 4603 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513962 4603 flags.go:64] FLAG: --cgroup-root=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513970 4603 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513979 4603 flags.go:64] FLAG: --client-ca-file=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513988 4603 flags.go:64] FLAG: --cloud-config=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.513997 4603 flags.go:64] FLAG: --cloud-provider=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514005 4603 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514021 4603 flags.go:64] FLAG: --cluster-domain=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514030 4603 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514040 4603 flags.go:64] FLAG: --config-dir=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514048 4603 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514058 4603 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514069 4603 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514078 4603 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514089 4603 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514122 4603 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514133 4603 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514142 4603 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514151 4603 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514161 4603 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514198 4603 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514209 4603 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514218 4603 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514227 4603 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514236 4603 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514246 4603 flags.go:64] FLAG: --enable-server="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514255 4603 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514273 4603 flags.go:64] FLAG: --event-burst="100"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514283 4603 flags.go:64] FLAG: --event-qps="50"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514292 4603 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514302 4603 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514311 4603 flags.go:64] FLAG: --eviction-hard=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514322 4603 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514332 4603 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514341 4603 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514352 4603 flags.go:64] FLAG: --eviction-soft=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514361 4603 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514370 4603 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514380 4603 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514389 4603 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514398 4603 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514408 4603 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514417 4603 flags.go:64] FLAG: --feature-gates=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514427 4603 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514437 4603 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514446 4603 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514458 4603 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514467 4603 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514476 4603 flags.go:64] FLAG: --help="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514485 4603 flags.go:64] FLAG: --hostname-override=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514494 4603 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514515 4603 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514524 4603 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514533 4603 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514541 4603 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514550 4603 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514560 4603 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514568 4603 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514577 4603 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514586 4603 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514595 4603 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514605 4603 flags.go:64] FLAG: --kube-reserved=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514614 4603 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514623 4603 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514632 4603 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514674 4603 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514686 4603 flags.go:64] FLAG: --lock-file=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514694 4603 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514704 4603 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514713 4603 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514726 4603 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514735 4603 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514746 4603 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514755 4603 flags.go:64] FLAG: --logging-format="text"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514764 4603 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514773 4603 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514782 4603 flags.go:64] FLAG: --manifest-url=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514791 4603 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514804 4603 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514813 4603 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514824 4603 flags.go:64] FLAG: --max-pods="110"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514833 4603 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514843 4603 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514851 4603 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514860 4603 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514869 4603 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514878 4603 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514900 4603 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514920 4603 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514930 4603 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514939 4603 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514948 4603 flags.go:64] FLAG: --pod-cidr=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514957 4603 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514971 4603 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514980 4603 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514989 4603 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.514998 4603 flags.go:64] FLAG: --port="10250"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515008 4603 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515016 4603 flags.go:64] FLAG: --provider-id=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515025 4603 flags.go:64] FLAG: --qos-reserved=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515034 4603 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515044 4603 flags.go:64] FLAG: --register-node="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515053 4603 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515062 4603 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515076 4603 flags.go:64] FLAG: --registry-burst="10"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515085 4603 flags.go:64] FLAG: --registry-qps="5"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515096 4603 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515104 4603 flags.go:64] FLAG: --reserved-memory=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515117 4603 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515126 4603 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515135 4603 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515144 4603 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515153 4603 flags.go:64] FLAG: --runonce="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515186 4603 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515196 4603 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515249 4603 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515304 4603 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515314 4603 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515324 4603 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515333 4603 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515343 4603 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515352 4603 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515361 4603 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515387 4603 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515402 4603 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515412 4603 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515422 4603 flags.go:64] FLAG: --system-cgroups=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515431 4603 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515445 4603 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515453 4603 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515462 4603 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515483 4603 flags.go:64] FLAG: --tls-min-version=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515492 4603 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515501 4603 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515511 4603 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515520 4603 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515530 4603 flags.go:64] FLAG: --v="2"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515542 4603 flags.go:64] FLAG: --version="false"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515553 4603 flags.go:64] FLAG: --vmodule=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515563 4603 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.515573 4603 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
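Note: the --system-reserved deprecation warning at the top of this log points at the file passed via --config, which for this kubelet is /etc/kubernetes/kubelet.conf. As a minimal illustrative sketch only (on OpenShift this file is generated by the machine config operator, so this is not the node's actual file), the flag values dumped above would map onto a KubeletConfiguration stanza like:

    # illustrative sketch, not the node's real /etc/kubernetes/kubelet.conf
    kind: KubeletConfiguration
    apiVersion: kubelet.config.k8s.io/v1beta1
    maxPods: 110
    systemReserved:
      cpu: 200m
      memory: 350Mi
      ephemeral-storage: 350Mi

When a setting appears both as a flag and in the file, the command-line flag still takes precedence, which is why the deprecation notices push these settings into the file.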
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515861 4603 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515873 4603 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515882 4603 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515893 4603 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515903 4603 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515911 4603 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515919 4603 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515928 4603 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515936 4603 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515945 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515953 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515960 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515968 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515975 4603 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515983 4603 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.515994 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516002 4603 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516009 4603 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516031 4603 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516042 4603 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516052 4603 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516060 4603 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516069 4603 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516077 4603 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516085 4603 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516093 4603 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516100 4603 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516108 4603 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516115 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516123 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516131 4603 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516138 4603 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516146 4603 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516154 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516161 4603 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516200 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516208 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516215 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516223 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516231 4603 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516238 4603 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516246 4603 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516253 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516261 4603 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516268 4603 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516276 4603 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516284 4603 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516294 4603 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516304 4603 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516313 4603 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516321 4603 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516330 4603 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516345 4603 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516353 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516390 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516398 4603 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516406 4603 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516414 4603 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516421 4603 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516430 4603 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516437 4603 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516445 4603 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516454 4603 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516461 4603 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516469 4603 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516480 4603 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516488 4603 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516495 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516503 4603 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516511 4603 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.516518 4603 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.516543 4603 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.531373 4603 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.531817 4603 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
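Note: the feature_gate.go:386 line above is the effective gate map after all overrides; the long runs of "unrecognized feature gate" warnings appear to be OpenShift cluster-level gates that the upstream kubelet does not know, so it warns and skips them. As a minimal sketch only, assuming one wanted to pin the same non-default gates declaratively, the equivalent KubeletConfiguration stanza would be:

    # illustrative sketch of the non-default gates from the map above
    featureGates:
      CloudDualStackNodeIPs: true
      DisableKubeletCloudCredentialProviders: true
      KMSv1: true
      ValidatingAdmissionPolicy: true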
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531951 4603 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531962 4603 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531967 4603 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531974 4603 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531978 4603 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531984 4603 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531990 4603 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.531999 4603 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532004 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532010 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532016 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532021 4603 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532025 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532030 4603 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532034 4603 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532039 4603 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532045 4603 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532049 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532054 4603 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532059 4603 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532064 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532069 4603 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532076 4603 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532082 4603 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532088 4603 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532094 4603 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532099 4603 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532110 4603 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532115 4603 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532120 4603 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532124 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532130 4603 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532134 4603 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532139 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532144 4603 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532149 4603 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532154 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532158 4603 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532177 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532184 4603 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532188 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532193 4603 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532198 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532202 4603 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532206 4603 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532211 4603 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532215 4603 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532219 4603 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532224 4603 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532228 4603 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532232 4603 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532237 4603 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532241 4603 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532246 4603 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532251 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532256 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532260 4603 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532264 4603 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532269 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532274 4603 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532278 4603 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532282 4603 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532287 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532294 4603 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532299 4603 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532304 4603 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532310 4603 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532315 4603 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532320 4603 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532325 4603 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532330 4603 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.532338 4603 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532495 4603 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532506 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532511 4603 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532515 4603 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532520 4603 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532524 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532528 4603 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532533 4603 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532538 4603 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532542 4603 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532548 4603 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532551 4603 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532556 4603 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532561 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532566 4603 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532571 4603 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532575 4603 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532580 4603 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532584 4603 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532588 4603 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532593 4603 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532597 4603 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532602 4603 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532606 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532610 4603 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532615 4603 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532619 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532625 4603 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532631 4603 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532639 4603 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532646 4603 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532652 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532657 4603 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532661 4603 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532666 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532672 4603 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532677 4603 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532683 4603 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532687 4603 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532692 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532698 4603 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532703 4603 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532707 4603 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532711 4603 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532715 4603 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532720 4603 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532724 4603 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532728 4603 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532732 4603 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532736 4603 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532740 4603 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532745 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532749 4603 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532753 4603 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532757 4603 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532761 4603 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532766 4603 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532770 4603 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532774 4603 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532778 4603 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532782 4603 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532786 4603 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532791 4603 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532795 4603 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532799 4603 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532803 4603 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532807 4603 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532812 4603 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532816 4603 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532821 4603 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.532825 4603 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.532834 4603 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.534147 4603 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.539114 4603 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.539277 4603 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.541768 4603 server.go:997] "Starting client certificate rotation"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.541806 4603 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.543293 4603 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-21 10:36:53.686926726 +0000 UTC
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.543410 4603 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1958h50m15.143522203s for next certificate rotation
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.566011 4603 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.568979 4603 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.589907 4603 log.go:25] "Validated CRI v1 runtime API"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.628951 4603 log.go:25] "Validated CRI v1 image API"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.632085 4603 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.640055 4603 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-19-40-58-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
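Note: the certificate_manager.go:356 entries above derive the rotation deadline from the client certificate, jittered to a random point at roughly 70-90% of the certificate's lifetime, which explains the otherwise odd 2025-12-21 10:36:53 deadline. As an illustrative check only, the same expiry can be read off the node with:

    # illustrative; run on the node, path taken from the certificate_store line above
    openssl x509 -noout -subject -enddate \
      -in /var/lib/kubelet/pki/kubelet-client-current.pem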
/dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.657132 4603 manager.go:217] Machine: {Timestamp:2025-09-30 19:46:38.652034253 +0000 UTC m=+0.590493091 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9 BootID:108450f4-bea7-4e7f-9d53-eb895322e83f Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:b9:39:b2 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:b9:39:b2 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:98:14:d8 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ff:2a:1e Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:20:4d:5f Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:1c:81:19 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:46:e0:7d:ad:18:ea Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f2:be:f5:6b:e2:10 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 
Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.657712 4603 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.657956 4603 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.658770 4603 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.659135 4603 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.659249 4603 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.659604 4603 topology_manager.go:138] "Creating topology manager with none policy" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.659624 4603 container_manager_linux.go:303] "Creating device plugin manager" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.660570 4603 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.660634 4603 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.661589 4603 state_mem.go:36] "Initialized new in-memory state store" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.661757 4603 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.665118 4603 kubelet.go:418] "Attempting to sync node with API server" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.665156 4603 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.665278 4603 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.665303 4603 kubelet.go:324] "Adding apiserver pod source" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.665323 4603 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.672537 4603 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.674102 4603 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.675712 4603 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.675804 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.675940 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.675932 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.676030 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677613 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677655 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677671 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677684 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677708 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677725 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677738 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677761 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677776 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677791 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677811 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.677824 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.678654 4603 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.679425 4603 server.go:1280] "Started kubelet"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.680943 4603 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 30 19:46:38 crc systemd[1]: Started Kubernetes Kubelet.
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.680901 4603 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.682582 4603 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.683325 4603 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.683801 4603 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.683934 4603 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.684708 4603 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 11:10:53.373176858 +0000 UTC
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.685929 4603 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2415h24m14.687256435s for next certificate rotation
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.686518 4603 server.go:460] "Adding debug handlers to kubelet server"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.685050 4603 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.687243 4603 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.688077 4603 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.688742 4603 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.689019 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="200ms"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.689507 4603 factory.go:55] Registering systemd factory
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.689557 4603 factory.go:221] Registration of the systemd container factory successfully
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.694721 4603 factory.go:153] Registering CRI-O factory
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.694830 4603 factory.go:221] Registration of the crio container factory successfully
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.695034 4603 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.695067 4603 factory.go:103] Registering Raw factory
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.695129 4603 manager.go:1196] Started watching for new ooms in manager
Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.695413 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.695554 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.699708 4603 manager.go:319] Starting recovery of all containers
Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.699482 4603 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.129:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a271a60452b4d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 19:46:38.679378765 +0000 UTC m=+0.617837613,LastTimestamp:2025-09-30 19:46:38.679378765 +0000 UTC m=+0.617837613,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707528 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707596 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707613 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707629 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707647 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707661 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707674 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707689 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707706 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707722 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707733 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707748 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707759 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707781 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707793 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707810 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707824 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707837 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707855 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707870 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707883 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707895 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707906 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707921 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707933 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707951 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707967 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.707986 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708000 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708023 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708037 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708053 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708068 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708115 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708132 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708145 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708176 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708191 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708208 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708226 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708240 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708260 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708329 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708344 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708363 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708378 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708393 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708408 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708420 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708436 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708452 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708502 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708530 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708552 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708573 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708586 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708607 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708621 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708640 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708654 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708675 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708688 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708699 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708714 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708726 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708739 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708752 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708765 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708780 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708798 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708813 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708824 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708835 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708850 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708863 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708897 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708912 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708923 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708937 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708953 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708969 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.708988 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709002 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709019 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709029 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709039 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709053 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709063 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709077 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709089 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709100 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709116 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709132 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709147 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709181 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709194 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709208 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709222 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709239 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709250 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709261 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709274 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709285 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709299 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709323 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709380 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709433 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709500 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709516 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709559 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709579 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709605 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709623 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709639 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709658 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709671 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709683 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709697 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709708 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709722 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709733 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709747 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709761 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709774 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709796 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709823 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709834 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709851 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709865 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.709878 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711317 4603 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711344 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711355 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711369 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711378 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711388 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711402 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711412 4603 reconstruct.go:130] "Volume is
marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711426 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711437 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711448 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711463 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711473 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711486 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711497 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711508 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711521 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711531 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711547 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711558 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711568 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711579 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711591 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711604 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711625 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711645 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711659 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711670 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711684 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711699 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711709 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711722 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711734 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711749 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711797 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.711813 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.715329 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.718942 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.718971 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.718986 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719000 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719066 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719082 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719094 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719109 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719121 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719134 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719146 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719160 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719190 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719202 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719214 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719228 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719243 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719258 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719270 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719282 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719293 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719305 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719316 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719330 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719342 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719354 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719368 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719384 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719396 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719409 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719421 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719435 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719450 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719462 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719480 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719492 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719505 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719517 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719529 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719540 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719553 4603 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719566 4603 reconstruct.go:97] "Volume reconstruction finished" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.719575 4603 reconciler.go:26] "Reconciler: start to sync state" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.729316 4603 manager.go:324] Recovery completed Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.739617 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.744814 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.744887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.744901 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.745864 4603 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.745910 4603 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.746033 4603 state_mem.go:36] "Initialized new in-memory state store" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.760954 4603 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.762920 4603 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.762970 4603 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.763012 4603 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.763080 4603 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 19:46:38 crc kubenswrapper[4603]: W0930 19:46:38.768974 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.769062 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.772541 4603 policy_none.go:49] "None policy: Start" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.775212 4603 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.775249 4603 state_mem.go:35] "Initializing new in-memory state store" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.790623 4603 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.828907 4603 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.129:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a271a60452b4d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 19:46:38.679378765 +0000 UTC m=+0.617837613,LastTimestamp:2025-09-30 19:46:38.679378765 +0000 UTC m=+0.617837613,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.837605 4603 manager.go:334] "Starting Device Plugin manager" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.837685 4603 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.837709 4603 server.go:79] "Starting device plugin registration server" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.838346 4603 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.838377 4603 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.838747 4603 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 19:46:38 crc 
kubenswrapper[4603]: I0930 19:46:38.838889 4603 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.838904 4603 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.852804 4603 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.864042 4603 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.864188 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.865623 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.865682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.865700 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.866010 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.866301 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.866339 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867255 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867300 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867312 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867323 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867364 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867376 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.867487 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.868008 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.868044 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.869069 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.869432 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.870383 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.869122 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.870447 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.870467 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.870781 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.870966 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.871092 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872283 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872326 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872337 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.872565 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873266 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873335 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873763 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.873999 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.874027 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.874387 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.874418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.874435 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.876411 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.876436 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.876451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.889903 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="400ms" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.921602 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923141 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923244 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923278 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923309 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923340 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923368 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923399 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923433 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923467 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923495 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923523 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:38 crc 
kubenswrapper[4603]: I0930 19:46:38.923552 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923578 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.923608 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.939354 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.940893 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.940945 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.940963 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:38 crc kubenswrapper[4603]: I0930 19:46:38.941002 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:38 crc kubenswrapper[4603]: E0930 19:46:38.941756 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.129:6443: connect: connection refused" node="crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025342 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025423 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025458 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025491 4603 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025542 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025575 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025604 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025611 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025652 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025732 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025634 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025759 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025791 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025813 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025827 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025838 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025837 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025859 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025892 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025915 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025933 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025957 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.025987 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026023 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026037 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026058 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026074 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026113 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026223 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.026368 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.142753 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.144483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.144538 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.144555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.144590 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.145268 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.129:6443: connect: 
connection refused" node="crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.192364 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.218384 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.237997 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.241134 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-9b6b7cfa0b6a362223c87f06a3af39f54100208b391488c5dccea96b22f0d1d1 WatchSource:0}: Error finding container 9b6b7cfa0b6a362223c87f06a3af39f54100208b391488c5dccea96b22f0d1d1: Status 404 returned error can't find the container with id 9b6b7cfa0b6a362223c87f06a3af39f54100208b391488c5dccea96b22f0d1d1 Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.247473 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.250699 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-ed8f5e80424fa0943149d48c79fde213f8413492e5b4f7cb2f2c21c90da1c381 WatchSource:0}: Error finding container ed8f5e80424fa0943149d48c79fde213f8413492e5b4f7cb2f2c21c90da1c381: Status 404 returned error can't find the container with id ed8f5e80424fa0943149d48c79fde213f8413492e5b4f7cb2f2c21c90da1c381 Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.253245 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.268686 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-5b1b13614752045167a4e498974aa731285b95c71143b54714978599b00a676f WatchSource:0}: Error finding container 5b1b13614752045167a4e498974aa731285b95c71143b54714978599b00a676f: Status 404 returned error can't find the container with id 5b1b13614752045167a4e498974aa731285b95c71143b54714978599b00a676f Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.278190 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-7de3e4800c56c0249df6a05be983b22d2ed1fb3deb35056985731990983fe807 WatchSource:0}: Error finding container 7de3e4800c56c0249df6a05be983b22d2ed1fb3deb35056985731990983fe807: Status 404 returned error can't find the container with id 7de3e4800c56c0249df6a05be983b22d2ed1fb3deb35056985731990983fe807 Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.280295 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-ab323524596db1b35d96a74480f2e4194877efc6540b7d6cf340b92a1bee0784 WatchSource:0}: Error finding container ab323524596db1b35d96a74480f2e4194877efc6540b7d6cf340b92a1bee0784: Status 404 returned error can't find the container with id ab323524596db1b35d96a74480f2e4194877efc6540b7d6cf340b92a1bee0784 Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.291320 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="800ms" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.545837 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.547451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.547498 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.547512 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.547541 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.548195 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.129:6443: connect: connection refused" node="crc" Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.564772 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.564882 4603 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.617844 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.617928 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.684246 4603 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.769483 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ab323524596db1b35d96a74480f2e4194877efc6540b7d6cf340b92a1bee0784"} Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.770871 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5b1b13614752045167a4e498974aa731285b95c71143b54714978599b00a676f"} Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.771726 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ed8f5e80424fa0943149d48c79fde213f8413492e5b4f7cb2f2c21c90da1c381"} Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.773571 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9b6b7cfa0b6a362223c87f06a3af39f54100208b391488c5dccea96b22f0d1d1"} Sep 30 19:46:39 crc kubenswrapper[4603]: I0930 19:46:39.774729 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7de3e4800c56c0249df6a05be983b22d2ed1fb3deb35056985731990983fe807"} Sep 30 19:46:39 crc kubenswrapper[4603]: W0930 19:46:39.788588 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:39 crc kubenswrapper[4603]: E0930 19:46:39.788674 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:40 crc kubenswrapper[4603]: E0930 19:46:40.092920 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="1.6s" Sep 30 19:46:40 crc kubenswrapper[4603]: W0930 19:46:40.242294 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:40 crc kubenswrapper[4603]: E0930 19:46:40.242410 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.349320 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.351436 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.351495 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.351513 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.351550 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:40 crc kubenswrapper[4603]: E0930 19:46:40.352196 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.129:6443: connect: connection refused" node="crc" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.685043 4603 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.780663 4603 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517" exitCode=0 Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.780774 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.780838 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.782508 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.782572 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.782590 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.782879 4603 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516" exitCode=0 Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.782961 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.783092 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.784756 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.785202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.785261 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.785276 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.785879 4603 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745" exitCode=0 Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.785982 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.786013 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.786288 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.786391 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.786420 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.787344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.787378 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.787398 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 
19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.789912 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.789979 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.790005 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.790009 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.790131 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.793056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.793517 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.793706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.794432 4603 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba" exitCode=0 Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.794527 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba"} Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.794541 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.795725 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.795753 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:40 crc kubenswrapper[4603]: I0930 19:46:40.795763 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.684958 4603 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.129:6443: connect: 
connection refused Sep 30 19:46:41 crc kubenswrapper[4603]: E0930 19:46:41.694044 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.129:6443: connect: connection refused" interval="3.2s" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.800658 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0fa33729df39dac39eb5ac092f4dcce8214c97279f6d6828ba2ba90051d77ed6"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.800734 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.801970 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.802009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.802018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.806861 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.806900 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.806911 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.806959 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.807966 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.807988 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.808000 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.811616 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.811672 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.811687 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.811703 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.813374 4603 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3" exitCode=0 Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.813505 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.814051 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.814452 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3"} Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819261 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819276 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819274 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819317 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.819328 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.952625 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.953874 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.953912 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.953925 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:41 crc kubenswrapper[4603]: I0930 19:46:41.953956 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:41 crc kubenswrapper[4603]: E0930 
19:46:41.954529 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.129:6443: connect: connection refused" node="crc" Sep 30 19:46:41 crc kubenswrapper[4603]: W0930 19:46:41.966736 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:41 crc kubenswrapper[4603]: E0930 19:46:41.966824 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:42 crc kubenswrapper[4603]: W0930 19:46:42.285748 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.129:6443: connect: connection refused Sep 30 19:46:42 crc kubenswrapper[4603]: E0930 19:46:42.285837 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.129:6443: connect: connection refused" logger="UnhandledError" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.820490 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990"} Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.820682 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.822025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.822077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.822094 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.825385 4603 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006" exitCode=0 Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.825516 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006"} Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.825622 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.825626 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:46:42 crc kubenswrapper[4603]: 
I0930 19:46:42.825907 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.825624 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.826688 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.826732 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.826745 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827590 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827616 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827639 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827727 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:42 crc kubenswrapper[4603]: I0930 19:46:42.827748 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834010 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dda3eb1f012fcddd1cf943a29ae83678c98089cb7ccd10c19eba403281d2b034"} Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834079 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cdd7af980975ed9eb11145de47abe761f5706b439d1f657c9f74123a92450434"} Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834087 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834197 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834093 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d9f782fa9240d87d0243b8bca592fbe6aaa9c6acec0f8bdd1891e8faa02c8442"} Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.834797 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c3730b24ab199fb09d7e4477cf8dcf4ad2d9bde57fc4282452c245b52a524be7"} Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.835593 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 
19:46:43.835633 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:43 crc kubenswrapper[4603]: I0930 19:46:43.835644 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.195997 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.196301 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.197971 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.198083 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.198210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.207566 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.793145 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.846558 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8b90c8bc4859808ec3f46a5b20b7b49824b49b6d2186b9ddad6bebdf50630d8c"} Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.846665 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.847230 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.846872 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.846751 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.848583 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.848641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.848659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.848912 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.849034 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.849183 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:44 crc 
kubenswrapper[4603]: I0930 19:46:44.849370 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.849416 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:44 crc kubenswrapper[4603]: I0930 19:46:44.849432 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.155156 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.156743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.156790 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.156802 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.156829 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.409216 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.409458 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.410912 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.410948 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.410958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.520069 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.849619 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.849687 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.850365 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.850918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.850952 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.850965 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.851314 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:45 
crc kubenswrapper[4603]: I0930 19:46:45.851358 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:45 crc kubenswrapper[4603]: I0930 19:46:45.851369 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:46 crc kubenswrapper[4603]: I0930 19:46:46.116843 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:46 crc kubenswrapper[4603]: I0930 19:46:46.117079 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:46 crc kubenswrapper[4603]: I0930 19:46:46.118787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:46 crc kubenswrapper[4603]: I0930 19:46:46.118826 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:46 crc kubenswrapper[4603]: I0930 19:46:46.118838 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.470809 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.471002 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.472256 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.472311 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.472328 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.481973 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.482208 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.483603 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.483641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:48 crc kubenswrapper[4603]: I0930 19:46:48.483654 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:48 crc kubenswrapper[4603]: E0930 19:46:48.852996 4603 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.671248 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.671492 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.672949 4603 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.673013 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.673025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.679810 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.860485 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.862266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.862341 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:49 crc kubenswrapper[4603]: I0930 19:46:49.862367 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:50 crc kubenswrapper[4603]: I0930 19:46:50.614638 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:46:50 crc kubenswrapper[4603]: I0930 19:46:50.862812 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:50 crc kubenswrapper[4603]: I0930 19:46:50.863564 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:50 crc kubenswrapper[4603]: I0930 19:46:50.863587 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:50 crc kubenswrapper[4603]: I0930 19:46:50.863599 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.685223 4603 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 30 19:46:52 crc kubenswrapper[4603]: W0930 19:46:52.857067 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.857189 4603 trace.go:236] Trace[291952241]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:46:42.856) (total time: 10001ms): Sep 30 19:46:52 crc kubenswrapper[4603]: Trace[291952241]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:46:52.857) Sep 30 19:46:52 crc kubenswrapper[4603]: Trace[291952241]: [10.00111926s] [10.00111926s] END Sep 30 19:46:52 crc kubenswrapper[4603]: E0930 19:46:52.857215 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to 
list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.870723 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.873216 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990"} Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.873310 4603 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990" exitCode=255 Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.873571 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.874620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.874740 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.874809 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:52 crc kubenswrapper[4603]: I0930 19:46:52.875423 4603 scope.go:117] "RemoveContainer" containerID="acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990" Sep 30 19:46:53 crc kubenswrapper[4603]: W0930 19:46:53.214681 4603 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.214796 4603 trace.go:236] Trace[868043921]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:46:43.212) (total time: 10001ms): Sep 30 19:46:53 crc kubenswrapper[4603]: Trace[868043921]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:46:53.214) Sep 30 19:46:53 crc kubenswrapper[4603]: Trace[868043921]: [10.001937074s] [10.001937074s] END Sep 30 19:46:53 crc kubenswrapper[4603]: E0930 19:46:53.214826 4603 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.458551 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.458778 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.460229 4603 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.460282 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.460293 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.540498 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.615063 4603 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.615128 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.653863 4603 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.653941 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.672437 4603 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]log ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]etcd ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-api-request-count-filter ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-startkubeinformers ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/generic-apiserver-start-informers ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/priority-and-fairness-config-consumer ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/priority-and-fairness-filter ok Sep 30 19:46:53 crc 
kubenswrapper[4603]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-apiextensions-informers ok Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/crd-informer-synced failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-system-namespaces-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-cluster-authentication-info-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-legacy-token-tracking-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-service-ip-repair-controllers ok Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/priority-and-fairness-config-producer failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/bootstrap-controller failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/start-kube-aggregator-informers ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/apiservice-status-local-available-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/apiservice-status-remote-available-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/apiservice-registration-controller failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/apiservice-wait-for-first-sync ok Sep 30 19:46:53 crc kubenswrapper[4603]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/kube-apiserver-autoregistration ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]autoregister-completion ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/apiservice-openapi-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: [+]poststarthook/apiservice-openapiv3-controller ok Sep 30 19:46:53 crc kubenswrapper[4603]: livez check failed Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.673309 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.878979 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.880809 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5"} Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.880887 4603 
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.881057 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882191 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882215 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882224 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882721 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882761 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.882773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:53 crc kubenswrapper[4603]: I0930 19:46:53.899555 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Sep 30 19:46:54 crc kubenswrapper[4603]: I0930 19:46:54.883680 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:46:54 crc kubenswrapper[4603]: I0930 19:46:54.884690 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:54 crc kubenswrapper[4603]: I0930 19:46:54.884754 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:54 crc kubenswrapper[4603]: I0930 19:46:54.884771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.526510 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.526993 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.527137 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.528370 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.528408 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.528422 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.532199 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.886540 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.887931 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.887972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:55 crc kubenswrapper[4603]: I0930 19:46:55.887986 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:56 crc kubenswrapper[4603]: I0930 19:46:56.888867 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 19:46:56 crc kubenswrapper[4603]: I0930 19:46:56.890296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:46:56 crc kubenswrapper[4603]: I0930 19:46:56.890341 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:46:56 crc kubenswrapper[4603]: I0930 19:46:56.890355 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:46:57 crc kubenswrapper[4603]: I0930 19:46:57.901531 4603 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.633534 4603 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.635870 4603 trace.go:236] Trace[773961744]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:46:46.285) (total time: 12350ms):
Sep 30 19:46:58 crc kubenswrapper[4603]: Trace[773961744]: ---"Objects listed" error: 12350ms (19:46:58.635)
Sep 30 19:46:58 crc kubenswrapper[4603]: Trace[773961744]: [12.350771925s] [12.350771925s] END
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.635910 4603 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.638919 4603 trace.go:236] Trace[1805389723]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 19:46:47.512) (total time: 11126ms):
Sep 30 19:46:58 crc kubenswrapper[4603]: Trace[1805389723]: ---"Objects listed" error: 11126ms (19:46:58.638)
Sep 30 19:46:58 crc kubenswrapper[4603]: Trace[1805389723]: [11.126217168s] [11.126217168s] END
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.638965 4603 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.641641 4603 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.642571 4603 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.677911 4603 apiserver.go:52] "Watching apiserver"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.680812 4603 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.681198 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.681651 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.681810 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.681939 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.682054 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.682310 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.682365 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.682412 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.682521 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.682644 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.685578 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.685657 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.686233 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.686540 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.686655 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.687689 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.688207 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.689392 4603 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.691126 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.691817 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.728987 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743191 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743248 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743274 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743294 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743314 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743334 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743351 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743369 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743387 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743407 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743427 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743451 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743472 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743493 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743584 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743609 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743631 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743687 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743711 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743732 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743754 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743776 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743797 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743819 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743841 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743864 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743859 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: 
"kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743889 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.743981 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744021 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744057 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744090 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744099 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744122 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744154 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744218 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744255 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744286 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744309 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744337 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744322 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744423 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744445 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744468 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744491 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744508 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744519 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744529 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744529 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744559 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744586 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744616 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744612 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744641 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744668 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744689 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744715 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744806 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744835 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744861 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744885 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744910 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744933 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744957 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744982 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745023 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745049 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745071 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745092 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745116 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745137 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745178 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745209 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745234 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745258 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745282 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745304 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745327 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745347 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745368 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745392 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745414 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745443 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745473 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745500 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745525 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745552 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745576 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745599 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745671 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745696 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745724 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745748 
4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745769 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745791 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745813 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745835 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746039 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746064 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746091 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746117 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746139 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746179 4603 
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746204 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746257 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746284 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746308 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746331 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746353 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746375 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746401 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746425 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746452 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746477 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746503 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744690 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744739 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744820 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744845 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744304 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746594 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744901 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744922 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.744964 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745146 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745137 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745216 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745367 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745381 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745436 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745533 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745591 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745592 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745688 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745743 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.745811 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746092 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746122 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746237 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746254 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746363 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746502 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746724 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746516 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746751 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746527 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746781 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746635 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746846 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746869 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746959 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747016 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747123 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747214 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747227 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747302 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.747428 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:46:59.247400725 +0000 UTC m=+21.185859583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747467 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747475 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747921 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.747940 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.746529 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748012 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748049 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748086 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748108 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748118 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.748150 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749668 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749670 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749719 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749742 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749760 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749778 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749796 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749811 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749828 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749844 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749859 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749874 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749889 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749933 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749953 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749971 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.749988 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750005 4603 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750023 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750021 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750041 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750059 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750077 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750094 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750112 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750130 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750147 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" 
(UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750177 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750194 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750210 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750228 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750246 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750264 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750279 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750295 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750313 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750328 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750344 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750362 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750378 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750393 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750412 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750428 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750448 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750467 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750484 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750500 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750515 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750531 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750548 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750564 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750579 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750595 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750610 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750627 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750644 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750661 4603 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750678 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750696 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750713 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750731 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750747 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750763 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750780 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750796 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750812 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 
19:46:58.750829 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750845 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750864 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750881 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750896 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750913 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750929 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750945 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750961 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750977 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 
19:46:58.750995 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751011 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751025 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751043 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751059 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751078 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751096 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751114 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751157 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751196 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: 
\"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751217 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751240 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751258 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751281 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751300 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751317 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751334 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751354 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751371 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751391 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751407 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751428 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751496 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751508 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751518 4603 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751528 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751538 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751549 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751558 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 
19:46:58.751567 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751578 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751587 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751597 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751606 4603 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751617 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751626 4603 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751637 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751646 4603 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751656 4603 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751667 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751677 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751686 4603 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 30 
19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751707 4603 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751716 4603 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751727 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751737 4603 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751746 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751755 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751765 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751775 4603 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751785 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751795 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751805 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751815 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751824 4603 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on 
node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751834 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751843 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751852 4603 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751862 4603 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751871 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751881 4603 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751890 4603 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751899 4603 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751909 4603 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751919 4603 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756777 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750421 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.782467 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.782851 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.782922 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.783052 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.783363 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750427 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.750440 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751030 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.751589 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752067 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752138 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752207 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752307 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752534 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752578 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752863 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.752915 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.753548 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.753593 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754027 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754035 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754156 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754229 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754371 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754440 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754601 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.754744 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.755476 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.755675 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756062 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756303 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756537 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756553 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756569 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.756737 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.757196 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.757615 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.758738 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.760596 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.760832 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.760968 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.761099 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.761191 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.761138 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.761507 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.761836 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.762111 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.762296 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.762370 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.762572 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764394 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764458 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764666 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764747 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764808 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.764952 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.765137 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.765190 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.765497 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.766740 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.767045 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.767276 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.769926 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770103 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770249 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770435 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770550 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770663 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770819 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.770846 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.771347 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.771503 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.771667 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.772544 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.773472 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.776562 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.776916 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.777153 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.777641 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.777869 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.778210 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.779409 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.780477 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.780547 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.780768 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.780786 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.783622 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.783863 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.784176 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.784204 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.784903 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.784936 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.785199 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.785260 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.785364 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.785562 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.785625 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.786246 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.786292 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.786638 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.786824 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.787086 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.787338 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.787627 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.787694 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.788206 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.788436 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.788548 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.788749 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.788793 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.789028 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.789064 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.789646 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.789946 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.790043 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.790442 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.790622 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.790639 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.790655 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.790687 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.790689 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.790715 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:46:59.290695957 +0000 UTC m=+21.229154775 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.791151 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.791566 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.791600 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.791756 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.792028 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.792592 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.793046 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.795018 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.795907 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.796201 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.806066 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.809233 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.815699 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.831380 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.837440 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.837470 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.837598 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:46:59.337579971 +0000 UTC m=+21.276038789 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.837950 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.838113 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:46:59.338093826 +0000 UTC m=+21.276552644 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.838797 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.839328 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.839667 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.840240 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.840561 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.840723 4603 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.841288 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.820444 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.826513 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853897 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853956 4603 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853970 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853982 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.853995 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854006 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854017 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854029 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854040 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854051 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854062 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854072 4603 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854083 4603 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854094 4603 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854104 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854115 4603 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854126 4603 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854136 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854147 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node 
\"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854157 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854186 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854196 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854207 4603 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854218 4603 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854228 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854239 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854250 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854259 4603 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854268 4603 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854277 4603 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854287 4603 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854296 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854308 4603 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854320 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854331 4603 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854342 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854354 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854365 4603 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854375 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854386 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854397 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854412 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854424 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854434 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854445 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854458 4603 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854470 4603 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854482 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854494 4603 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854505 4603 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854516 4603 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854527 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854537 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854553 4603 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854563 4603 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854574 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854585 4603 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854596 4603 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854607 4603 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" 
(UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854618 4603 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854628 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854638 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854648 4603 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854659 4603 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854670 4603 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854681 4603 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854692 4603 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854705 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854716 4603 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854728 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854738 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854748 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854758 4603 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854771 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854782 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854793 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854804 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854816 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854827 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854838 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854848 4603 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854858 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854868 4603 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854879 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854890 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" 
(UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854900 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854911 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854922 4603 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854932 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854943 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854954 4603 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854963 4603 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.854988 4603 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855003 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855017 4603 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855029 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855040 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855051 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855061 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855072 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855083 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855093 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855104 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855115 4603 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855127 4603 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855137 4603 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855149 4603 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855246 4603 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855263 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855274 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855285 4603 reconciler_common.go:293] "Volume detached 
for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855295 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855306 4603 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855316 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855327 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855337 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855348 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855358 4603 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855370 4603 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855381 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855393 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855403 4603 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855415 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855425 4603 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855436 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855445 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855456 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855466 4603 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855476 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855486 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855497 4603 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855508 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855518 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855528 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855539 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855551 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855561 4603 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855573 4603 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855584 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855594 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855604 4603 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855615 4603 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855625 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855636 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855648 4603 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855660 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855671 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855682 4603 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855694 4603 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.855739 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.856487 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.856557 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.858890 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.860180 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.861995 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.862668 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.863719 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.864704 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.865407 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.866403 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.866915 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.868240 4603 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.868857 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.876942 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.870132 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.877267 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.877283 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:58 crc kubenswrapper[4603]: E0930 19:46:58.877327 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:46:59.377311602 +0000 UTC m=+21.315770420 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.876836 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.878524 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.878662 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.881642 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.884655 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.888308 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.890123 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.892091 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.893062 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.896026 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.898837 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.899475 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.900446 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.900890 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.901563 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.901596 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.909737 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.909982 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.910450 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.911077 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.913808 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.914641 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.915866 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.916533 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.917138 4603 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.917987 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.921097 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 
19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.922012 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.923893 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.926066 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.926795 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.928012 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.929131 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.929911 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.931917 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.932579 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.934464 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.936367 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.941597 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.942224 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 19:46:58 crc 
kubenswrapper[4603]: I0930 19:46:58.943539 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.943528 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.944216 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.946467 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.947033 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.948150 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.948943 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.949881 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.951425 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.952039 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.952965 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.958438 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.958477 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.958511 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.969808 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.980832 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:58 crc kubenswrapper[4603]: I0930 19:46:58.990741 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.000785 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.004113 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 19:46:59 crc kubenswrapper[4603]: W0930 19:46:59.016276 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-5ffa7539442485b9db3764e96281f7bc0a4f66b4c3d83fc6f271026a0ee62f55 WatchSource:0}: Error finding container 5ffa7539442485b9db3764e96281f7bc0a4f66b4c3d83fc6f271026a0ee62f55: Status 404 returned error can't find the container with id 5ffa7539442485b9db3764e96281f7bc0a4f66b4c3d83fc6f271026a0ee62f55 Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.016489 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.017596 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 19:46:59 crc kubenswrapper[4603]: W0930 19:46:59.027745 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-626816454dad8ede581bb7047082600836637e81d34ead5f82cf34f2c0918aad WatchSource:0}: Error finding container 626816454dad8ede581bb7047082600836637e81d34ead5f82cf34f2c0918aad: Status 404 returned error can't find the container with id 626816454dad8ede581bb7047082600836637e81d34ead5f82cf34f2c0918aad Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.028284 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.037368 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.040920 4603 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.260378 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.260516 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:00.260484923 +0000 UTC m=+22.198943741 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.361227 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.361266 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.361289 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361385 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361443 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:00.361429216 +0000 UTC m=+22.299888024 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361504 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361515 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361525 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361546 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:00.361539959 +0000 UTC m=+22.299998777 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361590 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.361609 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:00.361603501 +0000 UTC m=+22.300062319 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.462254 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.462426 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.462456 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.462468 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:59 crc kubenswrapper[4603]: E0930 19:46:59.462527 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:00.462511462 +0000 UTC m=+22.400970280 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.906204 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.906906 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.908922 4603 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5" exitCode=255 Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.909006 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.909061 4603 scope.go:117] "RemoveContainer" containerID="acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.911944 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.911984 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"69bcaba0b8ffd9855c3793e4004bc9dcf260e42ac44ceef4fb7487dd9321384b"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.913328 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"626816454dad8ede581bb7047082600836637e81d34ead5f82cf34f2c0918aad"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.914803 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.914826 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.914836 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5ffa7539442485b9db3764e96281f7bc0a4f66b4c3d83fc6f271026a0ee62f55"} Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.937760 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:46:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.954551 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:46:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.968661 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-gn9pm"] Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.968955 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.970695 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.970849 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.971321 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 19:46:59 crc kubenswrapper[4603]: I0930 19:46:59.979433 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:46:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.002346 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:46:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.022807 4603 scope.go:117] "RemoveContainer" containerID="9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.023226 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.024574 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.033276 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.067025 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw8d8\" (UniqueName: \"kubernetes.io/projected/30de54b5-7eba-4480-971f-be5bf196d8b3-kube-api-access-qw8d8\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.067108 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/30de54b5-7eba-4480-971f-be5bf196d8b3-hosts-file\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.069553 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.096333 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:52Z\\\",\\\"message\\\":\\\"W0930 19:46:41.977082 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:46:41.977505 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759261601 cert, and key in /tmp/serving-cert-4045443874/serving-signer.crt, /tmp/serving-cert-4045443874/serving-signer.key\\\\nI0930 19:46:42.241095 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:46:42.243915 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:46:42.244113 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:46:42.246278 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4045443874/tls.crt::/tmp/serving-cert-4045443874/tls.key\\\\\\\"\\\\nF0930 19:46:52.611987 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.113963 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.128188 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.140346 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.152774 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.163715 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.167442 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/30de54b5-7eba-4480-971f-be5bf196d8b3-hosts-file\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.167518 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw8d8\" (UniqueName: \"kubernetes.io/projected/30de54b5-7eba-4480-971f-be5bf196d8b3-kube-api-access-qw8d8\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.167786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/30de54b5-7eba-4480-971f-be5bf196d8b3-hosts-file\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.175717 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.230790 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.234995 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw8d8\" (UniqueName: \"kubernetes.io/projected/30de54b5-7eba-4480-971f-be5bf196d8b3-kube-api-access-qw8d8\") pod \"node-resolver-gn9pm\" (UID: \"30de54b5-7eba-4480-971f-be5bf196d8b3\") " pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.268799 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.268979 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:02.268939347 +0000 UTC m=+24.207398165 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.279924 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gn9pm" Sep 30 19:47:00 crc kubenswrapper[4603]: W0930 19:47:00.291677 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30de54b5_7eba_4480_971f_be5bf196d8b3.slice/crio-718a116315d723b5e8885e9d6ed675379e751f2feb653fabe630a433d65a9408 WatchSource:0}: Error finding container 718a116315d723b5e8885e9d6ed675379e751f2feb653fabe630a433d65a9408: Status 404 returned error can't find the container with id 718a116315d723b5e8885e9d6ed675379e751f2feb653fabe630a433d65a9408 Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.370049 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.370115 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.370140 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370234 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370291 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:02.370274281 +0000 UTC m=+24.308733099 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370373 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370401 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:02.370392604 +0000 UTC m=+24.308851412 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370471 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370484 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370497 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.370529 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:02.370519967 +0000 UTC m=+24.308978785 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.396328 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-6sgvc"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.396605 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.404180 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.404553 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.404751 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.404940 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.409257 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.411914 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-g8q5x"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.413776 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.420674 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.420715 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.420863 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.420940 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.420993 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.434321 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:52Z\\\",\\\"message\\\":\\\"W0930 19:46:41.977082 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:46:41.977505 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759261601 cert, and key in /tmp/serving-cert-4045443874/serving-signer.crt, /tmp/serving-cert-4045443874/serving-signer.key\\\\nI0930 19:46:42.241095 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:46:42.243915 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:46:42.244113 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:46:42.246278 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4045443874/tls.crt::/tmp/serving-cert-4045443874/tls.key\\\\\\\"\\\\nF0930 19:46:52.611987 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.454357 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470853 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cni-binary-copy\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470887 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3adf7280-9c4a-403e-8605-b5e5897f3521-proxy-tls\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470903 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3adf7280-9c4a-403e-8605-b5e5897f3521-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470922 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-conf-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470942 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-system-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470957 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cnibin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470982 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-os-release\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.470997 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-k8s-cni-cncf-io\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471159 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-netns\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471257 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-daemon-config\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471279 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-multus-certs\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471303 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d852n\" (UniqueName: \"kubernetes.io/projected/3adf7280-9c4a-403e-8605-b5e5897f3521-kube-api-access-d852n\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471332 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471348 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-socket-dir-parent\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471384 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471409 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-bin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471441 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-multus\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.471519 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.471537 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.471548 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471584 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-kubelet\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.471624 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:02.471608714 +0000 UTC m=+24.410067532 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471684 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3adf7280-9c4a-403e-8605-b5e5897f3521-rootfs\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471738 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-hostroot\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471757 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-etc-kubernetes\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.471774 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nsnc\" (UniqueName: \"kubernetes.io/projected/d0d17316-8ee1-4df6-98b6-eefa64f035d9-kube-api-access-8nsnc\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.479975 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.495632 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.511839 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.522029 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.537681 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.554019 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.567024 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.572890 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-bin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.572938 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-multus\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.572962 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-kubelet\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.572982 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3adf7280-9c4a-403e-8605-b5e5897f3521-rootfs\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573004 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-hostroot\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573026 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-etc-kubernetes\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573047 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nsnc\" (UniqueName: \"kubernetes.io/projected/d0d17316-8ee1-4df6-98b6-eefa64f035d9-kube-api-access-8nsnc\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573077 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cni-binary-copy\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573095 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3adf7280-9c4a-403e-8605-b5e5897f3521-proxy-tls\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573117 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3adf7280-9c4a-403e-8605-b5e5897f3521-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573186 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-conf-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573206 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-system-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573225 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cnibin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573243 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-os-release\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573267 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-k8s-cni-cncf-io\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573300 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-netns\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573321 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-daemon-config\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573341 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-multus-certs\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573361 4603 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-d852n\" (UniqueName: \"kubernetes.io/projected/3adf7280-9c4a-403e-8605-b5e5897f3521-kube-api-access-d852n\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573381 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573402 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-socket-dir-parent\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573495 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-socket-dir-parent\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573541 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-bin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573573 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-cni-multus\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573603 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-var-lib-kubelet\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573636 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3adf7280-9c4a-403e-8605-b5e5897f3521-rootfs\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573667 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-hostroot\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.573696 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-etc-kubernetes\") pod \"multus-6sgvc\" (UID: 
\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.574693 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cni-binary-copy\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575133 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-k8s-cni-cncf-io\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575238 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-cnibin\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575261 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-multus-certs\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575290 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-host-run-netns\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575375 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-system-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575526 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-conf-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575538 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-os-release\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.575686 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-cni-dir\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.576013 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/d0d17316-8ee1-4df6-98b6-eefa64f035d9-multus-daemon-config\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.576066 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3adf7280-9c4a-403e-8605-b5e5897f3521-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.578716 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3adf7280-9c4a-403e-8605-b5e5897f3521-proxy-tls\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.583986 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.591741 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d852n\" (UniqueName: 
\"kubernetes.io/projected/3adf7280-9c4a-403e-8605-b5e5897f3521-kube-api-access-d852n\") pod \"machine-config-daemon-g8q5x\" (UID: \"3adf7280-9c4a-403e-8605-b5e5897f3521\") " pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.591817 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nsnc\" (UniqueName: \"kubernetes.io/projected/d0d17316-8ee1-4df6-98b6-eefa64f035d9-kube-api-access-8nsnc\") pod \"multus-6sgvc\" (UID: \"d0d17316-8ee1-4df6-98b6-eefa64f035d9\") " pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.598684 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.607792 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.618608 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.623071 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.624725 4603 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.628724 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.644838 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.658879 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:52Z\\\",\\\"message\\\":\\\"W0930 19:46:41.977082 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:46:41.977505 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759261601 cert, and key in /tmp/serving-cert-4045443874/serving-signer.crt, /tmp/serving-cert-4045443874/serving-signer.key\\\\nI0930 19:46:42.241095 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:46:42.243915 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:46:42.244113 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:46:42.246278 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4045443874/tls.crt::/tmp/serving-cert-4045443874/tls.key\\\\\\\"\\\\nF0930 19:46:52.611987 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.670000 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.681097 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.691410 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.705268 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.716942 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:52Z\\\",\\\"message\\\":\\\"W0930 19:46:41.977082 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:46:41.977505 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759261601 cert, and key in /tmp/serving-cert-4045443874/serving-signer.crt, /tmp/serving-cert-4045443874/serving-signer.key\\\\nI0930 19:46:42.241095 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:46:42.243915 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:46:42.244113 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:46:42.246278 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4045443874/tls.crt::/tmp/serving-cert-4045443874/tls.key\\\\\\\"\\\\nF0930 19:46:52.611987 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.724956 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-6sgvc" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.729241 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.730371 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:47:00 crc kubenswrapper[4603]: W0930 19:47:00.739993 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3adf7280_9c4a_403e_8605_b5e5897f3521.slice/crio-0842860c27bb38f73df2a932d7e7b5f49b227f2391b0e5a15ba3f7ad079f058d WatchSource:0}: Error finding container 0842860c27bb38f73df2a932d7e7b5f49b227f2391b0e5a15ba3f7ad079f058d: Status 404 returned error can't find the container with id 0842860c27bb38f73df2a932d7e7b5f49b227f2391b0e5a15ba3f7ad079f058d Sep 30 19:47:00 crc kubenswrapper[4603]: W0930 19:47:00.741264 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0d17316_8ee1_4df6_98b6_eefa64f035d9.slice/crio-0156b3ca77ca4f49631020eef3fd02a8419caedfa4d3e92afddbec6e101753d4 WatchSource:0}: Error finding container 0156b3ca77ca4f49631020eef3fd02a8419caedfa4d3e92afddbec6e101753d4: Status 404 returned error can't find the container with id 0156b3ca77ca4f49631020eef3fd02a8419caedfa4d3e92afddbec6e101753d4 Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.747025 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.764206 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.764254 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.764307 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.764379 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.764512 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.764567 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.769075 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.781394 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.808243 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.812679 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-xwttq"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.813256 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.815861 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.822264 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-blpqj"] Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.822965 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.823269 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.825538 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.826238 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.826714 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.826775 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.827365 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.827382 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 19:47:00 crc 
kubenswrapper[4603]: I0930 19:47:00.827863 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.828459 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.837539 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.856504 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.873695 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875208 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875236 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875254 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875269 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875284 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-cnibin\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875307 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875321 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875335 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875349 
4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875364 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xbtf\" (UniqueName: \"kubernetes.io/projected/3b53f70a-60cb-4928-95ef-836c0e4170a5-kube-api-access-9xbtf\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875379 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875395 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875409 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875425 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875440 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875460 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875473 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875486 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875501 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875516 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875539 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-os-release\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875553 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875574 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875586 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875601 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875617 4603 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-system-cni-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.875632 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkhtc\" (UniqueName: \"kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.885908 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.898314 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.909125 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.921022 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.921100 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"0842860c27bb38f73df2a932d7e7b5f49b227f2391b0e5a15ba3f7ad079f058d"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.922117 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerStarted","Data":"bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.922177 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerStarted","Data":"0156b3ca77ca4f49631020eef3fd02a8419caedfa4d3e92afddbec6e101753d4"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.923369 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gn9pm" event={"ID":"30de54b5-7eba-4480-971f-be5bf196d8b3","Type":"ContainerStarted","Data":"da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.923392 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gn9pm" event={"ID":"30de54b5-7eba-4480-971f-be5bf196d8b3","Type":"ContainerStarted","Data":"718a116315d723b5e8885e9d6ed675379e751f2feb653fabe630a433d65a9408"} Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.926427 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.928495 4603 scope.go:117] "RemoveContainer" containerID="9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5" Sep 30 19:47:00 crc kubenswrapper[4603]: E0930 19:47:00.928609 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.932756 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.967593 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976448 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976485 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976503 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: 
\"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976520 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xbtf\" (UniqueName: \"kubernetes.io/projected/3b53f70a-60cb-4928-95ef-836c0e4170a5-kube-api-access-9xbtf\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976543 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976556 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976573 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976589 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976608 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976623 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976637 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976654 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-os-release\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " 
pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976679 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976692 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976707 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976727 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976759 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkhtc\" (UniqueName: \"kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976774 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-system-cni-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976797 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976820 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976833 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976848 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976861 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976877 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976892 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-cnibin\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976922 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.976935 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.977556 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.977605 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.977626 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.977646 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978009 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978038 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978059 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978111 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978133 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978154 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978191 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.978328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-os-release\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 
19:47:00.979190 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979371 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979390 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979591 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979659 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-system-cni-dir\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979687 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979702 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979712 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3b53f70a-60cb-4928-95ef-836c0e4170a5-cnibin\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979739 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.979766 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.980216 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3b53f70a-60cb-4928-95ef-836c0e4170a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.980220 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.981366 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.988615 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:00Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:00 crc kubenswrapper[4603]: I0930 19:47:00.995887 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkhtc\" (UniqueName: \"kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc\") pod \"ovnkube-node-blpqj\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.004478 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.009679 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xbtf\" (UniqueName: \"kubernetes.io/projected/3b53f70a-60cb-4928-95ef-836c0e4170a5-kube-api-access-9xbtf\") pod 
\"multus-additional-cni-plugins-xwttq\" (UID: \"3b53f70a-60cb-4928-95ef-836c0e4170a5\") " pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.027324 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"
imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.041789 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acb9735ef44f25cfae58eb0fa9417af3e279bcc47038a0b81b2b07c9c44e1990\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:52Z\\\",\\\"message\\\":\\\"W0930 19:46:41.977082 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
19:46:41.977505 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759261601 cert, and key in /tmp/serving-cert-4045443874/serving-signer.crt, /tmp/serving-cert-4045443874/serving-signer.key\\\\nI0930 19:46:42.241095 1 observer_polling.go:159] Starting file observer\\\\nW0930 19:46:42.243915 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 19:46:42.244113 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 19:46:42.246278 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4045443874/tls.crt::/tmp/serving-cert-4045443874/tls.key\\\\\\\"\\\\nF0930 19:46:52.611987 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.058558 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.070939 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.085591 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.101588 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.120686 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.122642 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xwttq" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.133976 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:01 crc kubenswrapper[4603]: W0930 19:47:01.134343 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b53f70a_60cb_4928_95ef_836c0e4170a5.slice/crio-84ee9f57800f0c76622b4fcbc07e2be811bad493a023f334571ea55afe2d5b63 WatchSource:0}: Error finding container 84ee9f57800f0c76622b4fcbc07e2be811bad493a023f334571ea55afe2d5b63: Status 404 returned error can't find the container with id 84ee9f57800f0c76622b4fcbc07e2be811bad493a023f334571ea55afe2d5b63 Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.138573 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18
fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: W0930 19:47:01.144502 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod543e574d_42bb_453f_ade2_2e9b5904a3d3.slice/crio-82ef2d703e3e16d073fb16b641ebb1ce606e5c0797d94d6d0fccafa2b8eb230a WatchSource:0}: Error finding container 82ef2d703e3e16d073fb16b641ebb1ce606e5c0797d94d6d0fccafa2b8eb230a: Status 404 returned error can't find the container with id 82ef2d703e3e16d073fb16b641ebb1ce606e5c0797d94d6d0fccafa2b8eb230a Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.157864 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.170373 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.194383 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.208894 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.234776 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.252791 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 
2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.268415 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.285172 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.300341 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.318686 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.330503 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.354430 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.933093 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.935635 4603 
generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a" exitCode=0 Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.935707 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.935728 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerStarted","Data":"84ee9f57800f0c76622b4fcbc07e2be811bad493a023f334571ea55afe2d5b63"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.937994 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.939943 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" exitCode=0 Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.939990 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.940024 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"82ef2d703e3e16d073fb16b641ebb1ce606e5c0797d94d6d0fccafa2b8eb230a"} Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.957426 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.983085 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:01 crc kubenswrapper[4603]: I0930 19:47:01.996244 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.013364 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.099755 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.127363 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.138199 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.148339 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.160969 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.172991 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.183786 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.199194 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: 
I0930 19:47:02.211844 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.225859 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.239862 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.255298 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.269205 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.282656 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.288042 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.288317 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:06.288299833 +0000 UTC m=+28.226758661 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.297522 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.312010 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.325754 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.343070 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.360813 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.382007 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.389489 4603 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.389559 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.389623 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.389721 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.389780 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:06.389760701 +0000 UTC m=+28.328219539 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390349 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390381 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390396 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390414 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390489 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-09-30 19:47:06.390474641 +0000 UTC m=+28.328933479 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.390508 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:06.390500201 +0000 UTC m=+28.328959039 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.406867 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z
\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.424055 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.490590 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.490748 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.490767 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.490781 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.490834 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:06.490818346 +0000 UTC m=+28.429277174 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.761028 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-s4x8t"] Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.761919 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.764410 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.764684 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.764824 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.764951 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.765649 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.765673 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.765806 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.765983 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.766240 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:02 crc kubenswrapper[4603]: E0930 19:47:02.766357 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.793516 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.796734 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4cc0db5c-2d9a-433f-8c96-48bb418919bc-host\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.796795 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4cc0db5c-2d9a-433f-8c96-48bb418919bc-serviceca\") pod \"node-ca-s4x8t\" (UID: 
\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.796819 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j772b\" (UniqueName: \"kubernetes.io/projected/4cc0db5c-2d9a-433f-8c96-48bb418919bc-kube-api-access-j772b\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.819699 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.847363 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.867598 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.881774 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.894004 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.897573 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4cc0db5c-2d9a-433f-8c96-48bb418919bc-host\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.897626 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4cc0db5c-2d9a-433f-8c96-48bb418919bc-serviceca\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.897657 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j772b\" (UniqueName: \"kubernetes.io/projected/4cc0db5c-2d9a-433f-8c96-48bb418919bc-kube-api-access-j772b\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.897652 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4cc0db5c-2d9a-433f-8c96-48bb418919bc-host\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 
crc kubenswrapper[4603]: I0930 19:47:02.898633 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/4cc0db5c-2d9a-433f-8c96-48bb418919bc-serviceca\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.907987 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.914843 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j772b\" (UniqueName: \"kubernetes.io/projected/4cc0db5c-2d9a-433f-8c96-48bb418919bc-kube-api-access-j772b\") pod \"node-ca-s4x8t\" (UID: \"4cc0db5c-2d9a-433f-8c96-48bb418919bc\") " pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.927492 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee
1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.940662 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.946513 4603 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerStarted","Data":"2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.949645 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.949691 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.949700 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.949709 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.949718 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.958049 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc 
kubenswrapper[4603]: I0930 19:47:02.972599 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.986036 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:02 crc kubenswrapper[4603]: I0930 19:47:02.996668 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.018700 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.040395 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.082109 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.086296 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-s4x8t" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.120975 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.161759 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.206419 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.244331 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.278197 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.322296 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.361135 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.399626 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.440607 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.482202 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.523021 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.563479 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.955206 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9" exitCode=0 Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.955282 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9"} Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.964839 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.967589 4603 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-image-registry/node-ca-s4x8t" event={"ID":"4cc0db5c-2d9a-433f-8c96-48bb418919bc","Type":"ContainerStarted","Data":"b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd"} Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.967639 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-s4x8t" event={"ID":"4cc0db5c-2d9a-433f-8c96-48bb418919bc","Type":"ContainerStarted","Data":"e828db26dc284b71cf7ed748ab2f7b14a3f960099ebc0d96087b8ae425acea0e"} Sep 30 19:47:03 crc kubenswrapper[4603]: I0930 19:47:03.981954 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c
431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] 
\\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.009119 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.035104 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.051861 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.067765 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.084446 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.104280 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.118903 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.132898 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.146586 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.164342 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.179475 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.196135 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.211338 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.227585 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.244829 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.260711 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.285290 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.326320 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.361076 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.407200 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.439782 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.480486 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.521557 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.564195 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true
,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.602745 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.644158 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.681866 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.763658 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.763757 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.764132 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:04 crc kubenswrapper[4603]: E0930 19:47:04.764340 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:04 crc kubenswrapper[4603]: E0930 19:47:04.764536 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:04 crc kubenswrapper[4603]: E0930 19:47:04.764809 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.975266 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2" exitCode=0 Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.975364 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2"} Sep 30 19:47:04 crc kubenswrapper[4603]: I0930 19:47:04.995896 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:04Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.009595 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.032098 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.041925 4603 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.044315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.044488 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.044503 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.044671 4603 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.048542 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.053613 4603 kubelet_node_status.go:115] "Node 
was previously registered" node="crc" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.053877 4603 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.055066 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.055136 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.055153 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.055201 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.055218 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.070331 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.071515 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 
2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.078427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.078465 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.078476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.078492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.078504 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.089204 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 
crc kubenswrapper[4603]: E0930 19:47:05.095800 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d
34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 
30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.099197 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.099244 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.099256 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.099275 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.099288 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.106678 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.114810 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z"
2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.122692 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.122775 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.122798 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.122824 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.122841 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.128429 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.139390 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.139390 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z"
2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.142339 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143263 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143307 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143324 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143335 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container 
Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.143335 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.155338 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 
2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: E0930 19:47:05.155487 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.155503 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.159417 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.159456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.159469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.159487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.159498 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.166871 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.208434 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.238681 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.262105 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.262135 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.262145 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.262189 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.262201 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.282374 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:05Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.364061 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.364104 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.364119 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.364137 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.364150 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.466785 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.466818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.466826 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.466840 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.466850 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.570898 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.570967 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.570983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.571009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.571029 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.674509 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.674550 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.674562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.674578 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.674590 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.777047 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.777083 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.777093 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.777109 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.777121 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.881150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.881209 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.881219 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.881234 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.881245 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.984050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.984129 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.984150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.984210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.984652 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:05Z","lastTransitionTime":"2025-09-30T19:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.988297 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.993277 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916" exitCode=0 Sep 30 19:47:05 crc kubenswrapper[4603]: I0930 19:47:05.993337 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.011960 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.033106 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.049570 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.068148 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.085487 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.087571 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.087628 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.087655 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.087685 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.087704 4603 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.110472 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z 
is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.122618 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.140040 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.152244 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.164507 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.178207 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.189863 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.189902 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.189915 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.189931 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.189943 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.190573 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.205295 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-relea
se\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.218976 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:06Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.292970 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.293018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.293032 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.293051 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.293091 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.334460 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.334801 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:14.334724795 +0000 UTC m=+36.273183653 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.396056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.396095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.396106 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.396125 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.396137 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.435792 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.435879 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.435959 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436071 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436150 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:14.43613295 +0000 UTC m=+36.374591788 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436491 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436571 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436591 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436665 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.436669 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:14.436647675 +0000 UTC m=+36.375106523 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.437087 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:14.437033966 +0000 UTC m=+36.375492804 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.499856 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.499928 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.499941 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.499962 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.500015 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.536980 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.537296 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.537333 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.537396 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.538114 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:14.537469324 +0000 UTC m=+36.475928162 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.604422 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.604465 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.604478 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.604493 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.604505 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.706210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.706274 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.706286 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.706301 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.706312 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.763264 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.763275 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.763401 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.763433 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.763290 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.763764 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.810035 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.810114 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.810131 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.810201 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.810221 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.888067 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.889156 4603 scope.go:117] "RemoveContainer" containerID="9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5" Sep 30 19:47:06 crc kubenswrapper[4603]: E0930 19:47:06.889588 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.916562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.916652 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.916662 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.917307 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:06 crc kubenswrapper[4603]: I0930 19:47:06.917328 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:06Z","lastTransitionTime":"2025-09-30T19:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.004204 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerStarted","Data":"814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.020393 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.020442 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.020467 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.020483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.020493 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.027306 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.042617 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.055397 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.069885 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.080105 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.096908 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.111730 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.123821 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.123905 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.123920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.123949 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.123962 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.128671 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.147061 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.166283 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.184190 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.213467 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.225476 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.226797 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.226858 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.226895 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.226917 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.226929 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.243823 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.332004 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.332394 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.332409 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.332428 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.332443 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.434820 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.434846 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.434856 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.434870 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.434879 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.540828 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.540872 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.540883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.540897 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.540907 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.643958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.644017 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.644037 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.644061 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.644080 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.747076 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.747124 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.747136 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.747154 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.747187 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.850235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.850337 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.850361 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.851265 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.851283 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.953825 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.953869 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.953880 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.953914 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:07 crc kubenswrapper[4603]: I0930 19:47:07.953929 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:07Z","lastTransitionTime":"2025-09-30T19:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.009560 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7" exitCode=0 Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.009625 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.014012 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.014520 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.014573 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.024836 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.052259 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.053845 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.058969 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.059006 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.059049 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.059066 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.059076 4603 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.065962 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.072965 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.089776 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.113607 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.123424 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.134125 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.150383 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.161144 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.161199 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.161211 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.161227 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.161242 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.167283 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.186390 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.198877 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.216064 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.228595 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.246716 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.259017 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.263771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.263808 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.263820 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.263837 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.263846 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.273083 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.286291 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.306093 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a72
3895f19eafd2fd79b646d263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.318105 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.330749 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.347984 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.358030 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.365859 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.365906 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.365917 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.365953 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.365962 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.370404 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.380233 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.399016 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f
9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.410843 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.423206 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.434812 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.469348 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.469388 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.469405 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.469420 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.469429 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.571983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.572053 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.572265 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.572282 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.572293 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.673899 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.673958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.673970 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.673987 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.674000 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.763495 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:08 crc kubenswrapper[4603]: E0930 19:47:08.763664 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.763726 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.763530 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:08 crc kubenswrapper[4603]: E0930 19:47:08.763879 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:08 crc kubenswrapper[4603]: E0930 19:47:08.763977 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.776515 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.776574 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.776592 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.776615 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.776633 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.788953 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.818317 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.839891 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.859928 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.879045 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.879084 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.879095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.879110 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.879123 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.892032 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.905626 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.922998 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.940423 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.954998 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.970310 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.980947 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.980990 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.981000 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.981016 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.981025 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:08Z","lastTransitionTime":"2025-09-30T19:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:08 crc kubenswrapper[4603]: I0930 19:47:08.985653 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.006388 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.019092 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b53f70a-60cb-4928-95ef-836c0e4170a5" containerID="936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d" exitCode=0 Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.019185 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerDied","Data":"936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.019259 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.022120 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.038454 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.054685 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.068618 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.081688 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.083687 4603 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.083738 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.083748 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.083766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.083818 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.090990 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.107132 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.120971 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.136997 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.150675 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.175702 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.187549 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.187588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.187602 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.187622 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.187637 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.192386 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.203613 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.215719 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.233665 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a72
3895f19eafd2fd79b646d263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.245368 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.289951 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.290137 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.290349 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.290446 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.290535 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.393791 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.393847 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.393871 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.393898 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.393918 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.496055 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.496150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.496215 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.496245 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.496269 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.598968 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.599033 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.599056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.599086 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.599105 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.703010 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.703063 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.703079 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.703100 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.703117 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.805919 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.805979 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.805997 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.806020 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.806038 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.909053 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.909123 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.909135 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.909157 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:09 crc kubenswrapper[4603]: I0930 19:47:09.909189 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:09Z","lastTransitionTime":"2025-09-30T19:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.011528 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.011569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.011581 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.011597 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.011608 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.029772 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" event={"ID":"3b53f70a-60cb-4928-95ef-836c0e4170a5","Type":"ContainerStarted","Data":"d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.029851 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.061118 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"
}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\"
:\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mou
ntPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.071550 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.084273 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.095597 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.106252 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.113578 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.113626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.113634 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.113648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.113658 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.119378 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.129373 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.142313 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.154485 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.167734 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.184317 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.199005 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.212245 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.215192 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.215229 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.215237 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.215252 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 
30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.215262 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.223157 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:10Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.317348 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.317389 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.317397 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.317411 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.317420 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.420221 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.420250 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.420258 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.420270 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.420279 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.523118 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.523199 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.523214 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.523230 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.523243 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.625202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.625232 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.625242 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.625255 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.625264 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.727961 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.728034 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.728048 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.728064 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.728076 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.763649 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:10 crc kubenswrapper[4603]: E0930 19:47:10.763759 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.764228 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:10 crc kubenswrapper[4603]: E0930 19:47:10.764290 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.764427 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:10 crc kubenswrapper[4603]: E0930 19:47:10.764484 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.831077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.831116 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.831128 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.831146 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.831158 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.933340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.933390 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.933408 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.933431 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:10 crc kubenswrapper[4603]: I0930 19:47:10.933448 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:10Z","lastTransitionTime":"2025-09-30T19:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.036106 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.036157 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.036204 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.036226 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.036242 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.037795 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/0.log" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.042269 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263" exitCode=1 Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.042321 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.043901 4603 scope.go:117] "RemoveContainer" containerID="8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.068756 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.092817 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.108484 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.127610 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.142570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.142615 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.142629 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.142649 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.142685 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.146536 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.174027 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.193389 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.208669 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.229261 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.245203 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.245228 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.245236 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.245249 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.245257 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.247605 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.271515 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.288890 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.312044 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a72
3895f19eafd2fd79b646d263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c
7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.321784 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j7
72b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:11Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.347445 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.347481 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.347492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.347510 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.347522 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.450593 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.450620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.450627 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.450639 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.450648 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.552665 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.552711 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.552723 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.552738 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.552750 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.655501 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.655544 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.655556 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.655575 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.655587 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.758771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.758826 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.758838 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.758860 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.758872 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.860920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.861001 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.861016 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.861049 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.861060 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.963468 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.963517 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.963535 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.963557 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:11 crc kubenswrapper[4603]: I0930 19:47:11.963574 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:11Z","lastTransitionTime":"2025-09-30T19:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.048181 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/0.log" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.050874 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.051045 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.064140 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f64
2dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.066002 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.066056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.066073 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.066096 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.066114 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.078110 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.089849 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.104769 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.116622 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.129946 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.142527 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.155526 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.168515 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.168569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.168585 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.168609 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.168626 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.169544 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.185556 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.203886 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.211492 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.221778 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.233024 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.270941 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.270976 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.270983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.270996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.271005 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.373555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.373907 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.374095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.374371 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.374564 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.477315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.477362 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.477379 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.477395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.477405 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.581218 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.581281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.581300 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.581325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.581342 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.685363 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.685430 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.685447 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.685474 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.685493 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.763741 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.763788 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:12 crc kubenswrapper[4603]: E0930 19:47:12.763955 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:12 crc kubenswrapper[4603]: E0930 19:47:12.764068 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.763787 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:12 crc kubenswrapper[4603]: E0930 19:47:12.764441 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.789324 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.789395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.789424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.789451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.789468 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.893291 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.893349 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.893368 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.893391 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.893413 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.945359 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb"] Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.946094 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.948907 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.949399 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.975054 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.996713 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.996769 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.996786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.996809 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.996827 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:12Z","lastTransitionTime":"2025-09-30T19:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:12 crc kubenswrapper[4603]: I0930 19:47:12.997840 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:12Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.003026 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m2s4\" (UniqueName: \"kubernetes.io/projected/bbfca2a3-f179-4b9f-b207-be198308366b-kube-api-access-2m2s4\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.003130 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bbfca2a3-f179-4b9f-b207-be198308366b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.003250 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.003323 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.043848 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.055498 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/1.log" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.057559 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/0.log" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.061206 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8" exitCode=1 Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.061252 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.061290 4603 scope.go:117] "RemoveContainer" containerID="8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.062241 4603 scope.go:117] "RemoveContainer" containerID="ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8" Sep 30 19:47:13 crc kubenswrapper[4603]: E0930 19:47:13.062437 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.080268 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.099584 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.099628 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.099641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.099660 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.099673 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.103972 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.104042 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m2s4\" (UniqueName: \"kubernetes.io/projected/bbfca2a3-f179-4b9f-b207-be198308366b-kube-api-access-2m2s4\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.104075 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bbfca2a3-f179-4b9f-b207-be198308366b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.104090 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.104655 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.104907 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bbfca2a3-f179-4b9f-b207-be198308366b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.109293 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bbfca2a3-f179-4b9f-b207-be198308366b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.109633 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.124618 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.124766 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m2s4\" (UniqueName: \"kubernetes.io/projected/bbfca2a3-f179-4b9f-b207-be198308366b-kube-api-access-2m2s4\") pod \"ovnkube-control-plane-749d76644c-r7dxb\" (UID: \"bbfca2a3-f179-4b9f-b207-be198308366b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.144313 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a3
7fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.157371 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate 
default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.169337 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.179271 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.187894 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.202873 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.202950 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.202964 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.202982 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.202996 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.204237 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.222657 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.235833 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.247049 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.262131 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.270814 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.286677 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: W0930 19:47:13.288776 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbfca2a3_f179_4b9f_b207_be198308366b.slice/crio-e748afc5c0c2057373c526b25be65978c5ff90735a2de779af8ba76d203bc246 WatchSource:0}: Error finding container e748afc5c0c2057373c526b25be65978c5ff90735a2de779af8ba76d203bc246: Status 404 returned error can't find the container with id e748afc5c0c2057373c526b25be65978c5ff90735a2de779af8ba76d203bc246 Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.304941 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.304997 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.305009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.305028 4603 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.305041 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.308015 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.318142 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.331762 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.344444 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.356562 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.370211 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.381925 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.392355 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.405921 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.408831 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.408888 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.408895 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.408915 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.408924 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.415975 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.427024 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.452063 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event 
handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c
7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.465549 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:13Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.511697 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.511740 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.511752 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.511766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.511777 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.616264 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.616315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.616328 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.616344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.616357 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.719613 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.719647 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.719657 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.719672 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.719682 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.821635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.821675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.821682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.821696 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.821708 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.924515 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.924563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.924574 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.924590 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:13 crc kubenswrapper[4603]: I0930 19:47:13.924603 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:13Z","lastTransitionTime":"2025-09-30T19:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.028658 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.028730 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.028754 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.028803 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.028827 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.067299 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/1.log" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.074693 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" event={"ID":"bbfca2a3-f179-4b9f-b207-be198308366b","Type":"ContainerStarted","Data":"e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.074769 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" event={"ID":"bbfca2a3-f179-4b9f-b207-be198308366b","Type":"ContainerStarted","Data":"f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.074797 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" event={"ID":"bbfca2a3-f179-4b9f-b207-be198308366b","Type":"ContainerStarted","Data":"e748afc5c0c2057373c526b25be65978c5ff90735a2de779af8ba76d203bc246"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.102989 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.127253 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.131159 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.131285 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.131310 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.131339 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.131362 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.146221 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.165225 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.190107 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.208778 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.227817 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.233962 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.234023 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.234046 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.234073 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.234089 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.260359 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped 
ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.283480 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d3
4720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.314337 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.335850 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.337194 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.337279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.337297 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.337328 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.337348 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.356350 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.373657 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.395560 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.415610 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.415899 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.415871655 +0000 UTC m=+52.354330513 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.417257 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\
":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.440758 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.440833 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.440858 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.440887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.440906 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.516969 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.517042 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.517083 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517299 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517325 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517344 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517343 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517365 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517415 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.517394274 +0000 UTC m=+52.455853132 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517465 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.517437795 +0000 UTC m=+52.455896643 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.517494 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.517481696 +0000 UTC m=+52.455940554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.543983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.544035 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.544052 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.544075 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.544095 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.618098 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.618417 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.618458 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.618484 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.618581 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.618552212 +0000 UTC m=+52.557011070 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.647570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.647635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.647651 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.647676 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.647693 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.750626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.750680 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.750695 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.750718 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.750735 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.763745 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.763798 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.763911 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.764003 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.764117 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.764225 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.853577 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.853642 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.853658 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.853682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.853699 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.861944 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-pwrc5"] Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.862582 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:14 crc kubenswrapper[4603]: E0930 19:47:14.862660 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.881160 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.897786 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.915842 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.930970 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.949735 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.955807 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.955847 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.955859 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.955876 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.955889 4603 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:14Z","lastTransitionTime":"2025-09-30T19:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.981593 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f79e6abcb3a67631d3aef6e56d0b10349156a723895f19eafd2fd79b646d263\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"message\\\":\\\"19:47:10.639502 5795 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.639557 5795 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639633 5795 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639774 5795 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.639791 5795 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:10.640019 5795 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0930 19:47:10.640288 5795 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from 
github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped 
ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:14 crc kubenswrapper[4603]: I0930 19:47:14.997505 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\
\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:14Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.015023 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPa
th\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.023124 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27lrl\" (UniqueName: \"kubernetes.io/projected/895a054c-b0e6-418a-9e96-b941b6e1946d-kube-api-access-27lrl\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.023308 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.035618 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.051986 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.058412 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.058492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.058505 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.058523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.058536 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.072861 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.090901 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.109497 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.124299 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27lrl\" (UniqueName: \"kubernetes.io/projected/895a054c-b0e6-418a-9e96-b941b6e1946d-kube-api-access-27lrl\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.124680 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.124904 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.124996 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:15.624969241 +0000 UTC m=+37.563428069 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.125030 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.147450 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.157373 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27lrl\" (UniqueName: \"kubernetes.io/projected/895a054c-b0e6-418a-9e96-b941b6e1946d-kube-api-access-27lrl\") pod 
\"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.161951 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.162206 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.162325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.162418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.162505 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.173464 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.265678 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.266061 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.266077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.266099 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.266116 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.369022 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.369075 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.369092 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.369115 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.369130 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.468293 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.468340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.468356 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.468377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.468394 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.486082 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.490364 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.490424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
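The webhook failure above is a plain X.509 validity-window violation: the serving certificate behind node.network-node-identity.openshift.io expired on 2025-08-24T17:21:41Z, more than a month before the wall-clock time in the log, so every TLS handshake to https://127.0.0.1:9743 is rejected before the patch is even evaluated. A minimal Go sketch of the same check; the certificate path is hypothetical, and Go's crypto/x509 performs the equivalent test internally during chain verification:

```go
// cert_check.go — a sketch (not kubelet code) of the validity-window check that
// produces the "certificate has expired or is not yet valid" error in this log.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path: point this at the webhook's serving certificate.
	pemBytes, err := os.ReadFile("/tmp/webhook-serving-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now()
	// A chain is rejected when now falls outside [NotBefore, NotAfter]; that is
	// exactly the 2025-09-30T19:47:15Z vs 2025-08-24T17:21:41Z comparison above.
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```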
event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.490447 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.490476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.490498 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.508546 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.515504 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.515832 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
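Every "Node became not ready" entry from setters.go carries the same Ready condition object. A stdlib-only Go sketch that unmarshals that condition JSON exactly as it appears in the log, to make the fields explicit; the struct below is illustrative (the kubelet itself uses the k8s.io/api corev1.NodeCondition type):

```go
// condition_parse.go — a sketch that parses the condition object logged above.
// The struct mirrors the JSON keys in the log line; it is not kubelet's type.
package main

import (
	"encoding/json"
	"fmt"
)

type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Copied verbatim from the setters.go:603 entries in this log.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	// A node is considered Ready only when this condition's status is "True".
	fmt.Printf("type=%s status=%s reason=%s\n", c.Type, c.Status, c.Reason)
}
```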
event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.515979 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.516121 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.516298 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.537991 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.542489 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.542536 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
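The err string in each failed attempt is a strategic-merge-patch body: "$setElementOrder/conditions" pins the order of the merged conditions list, and each condition entry merges on its "type" key. A Go sketch of how such a body is assembled, trimmed to two conditions for brevity; this is illustrative only, and the kubelet sends the real payload as a PATCH (application/strategic-merge-patch+json) to the node's /status subresource:

```go
// patch_body.go — a sketch of the strategic-merge-patch shape seen in this log.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			// Directive: fixes the ordering of the merged conditions list.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"}, {"type": "Ready"},
			},
			// Entries merge into the existing list keyed by "type".
			"conditions": []map[string]string{
				{"type": "MemoryPressure", "status": "False", "reason": "KubeletHasSufficientMemory"},
				{"type": "Ready", "status": "False", "reason": "KubeletNotReady"},
			},
		},
	}
	body, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		panic(err)
	}
	// Here we only print the body; kubelet would PATCH it to
	// /api/v1/nodes/crc/status, which is what the webhook intercepts above.
	fmt.Println(string(body))
}
```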
event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.542548 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.542565 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.542577 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.558541 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.562872 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.562910 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.562924 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.562946 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.562961 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.583652 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:15Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.583931 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.586077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.586121 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.586134 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.586150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.586188 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.630667 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.630993 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:15 crc kubenswrapper[4603]: E0930 19:47:15.631122 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:16.631098131 +0000 UTC m=+38.569556969 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.689327 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.689664 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.689872 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.690048 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:15 crc kubenswrapper[4603]: I0930 19:47:15.690251 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:15Z","lastTransitionTime":"2025-09-30T19:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.643145 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.643399 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.643520 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:18.643494627 +0000 UTC m=+40.581953485 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.722499 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.722575 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.722607 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.722638 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.722661 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:16Z","lastTransitionTime":"2025-09-30T19:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.763256 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.763423 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.763465 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.763575 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.763794 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.763935 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.764030 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:16 crc kubenswrapper[4603]: E0930 19:47:16.764155 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.825861 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.825905 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.825918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.825934 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:16 crc kubenswrapper[4603]: I0930 19:47:16.825945 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:16Z","lastTransitionTime":"2025-09-30T19:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.499318 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.500310 4603 scope.go:117] "RemoveContainer" containerID="ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8" Sep 30 19:47:17 crc kubenswrapper[4603]: E0930 19:47:17.500497 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.521056 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.535332 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.546013 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.546339 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.546476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.546580 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.546677 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:17Z","lastTransitionTime":"2025-09-30T19:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.556421 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.575306 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.599230 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.621430 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.636795 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.649924 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.650003 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.650016 4603 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.650032 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.650043 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:17Z","lastTransitionTime":"2025-09-30T19:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.650958 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.671013 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.689319 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.705195 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.735042 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.751909 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.752952 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.753046 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.753067 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.753092 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.753109 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:17Z","lastTransitionTime":"2025-09-30T19:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.766120 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.780883 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.798092 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.855970 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.856332 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.856465 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.856588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.856713 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:17Z","lastTransitionTime":"2025-09-30T19:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.959675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.959778 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.959795 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.959817 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:17 crc kubenswrapper[4603]: I0930 19:47:17.959837 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:17Z","lastTransitionTime":"2025-09-30T19:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.063502 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.063840 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.064016 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.064242 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.064406 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.167735 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.167775 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.167786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.167808 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.167819 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.270535 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.270591 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.270608 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.270629 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.270649 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.373839 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.373885 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.373894 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.373909 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.373919 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.477149 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.477266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.477283 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.477307 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.477329 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
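All of the NodeNotReady heartbeats in this stretch share one root cause: the container runtime reports NetworkReady=false because nothing has yet written a CNI network config into /etc/kubernetes/cni/net.d/ (presumably the network plugin, OVN-Kubernetes on this cluster, writes it once its pods are up). The readiness gate reduces to scanning that directory for a usable config file; an approximate, stdlib-only sketch (the set of extensions checked is an assumption, not read from this log):

    // cnicheck.go: approximates the "no CNI configuration file" readiness
    // gate by looking for a network config in the directory named above.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Printf("NetworkReady=false: cannot read %s: %v\n", confDir, err)
            return
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Printf("NetworkReady=true: found %s\n", filepath.Join(confDir, e.Name()))
                return
            }
        }
        fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
    }

Until that check passes, the kubelet keeps republishing the same Ready=False node condition on every sync, which is why the five-event block repeats every ~100ms below.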
Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.581773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.582156 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.582366 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.582521 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.582639 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.664431 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.664668 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.664989 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:22.664961373 +0000 UTC m=+44.603420221 (durationBeforeRetry 4s). 
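The nestedpendingoperations entry just above shows the volume manager's retry policy: the metrics-certs mount fails because the secret object "openshift-multus"/"metrics-daemon-secret" is not yet registered with the kubelet's secret manager (typically because the pod was only just added), so the operation is re-queued with exponential backoff. A sketch of that doubling-with-cap schedule follows; the initial delay and cap are assumptions, not values read from this log, but under them the logged durationBeforeRetry 4s would correspond to the fourth consecutive failure:

    // backoff.go: doubling-with-cap retry delays of the kind behind
    // "durationBeforeRetry 4s" in the nestedpendingoperations message.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond // assumed initial delay
        maxDelay := 2 * time.Minute     // assumed cap
        for attempt := 1; attempt <= 8; attempt++ {
            fmt.Printf("attempt %d failed -> durationBeforeRetry %s\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }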
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.685618 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.685658 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.685668 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.685683 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.685695 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.763962 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.764056 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.764284 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.764397 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.764589 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.764935 4603 util.go:30] "No sandbox for pod can be found. 
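The util.go lines ("No sandbox for pod can be found. Need to start a new one") paired with the pod_workers errors show the ordering constraint at work: the kubelet wants to create fresh sandboxes for these pods, but it skips syncing any pod that needs the pod network while NetworkReady=false, retrying later; host-network pods are exempt, which is why the static control-plane pods in this log are already Running. An illustrative sketch of that gate (the types and names here are mine, not kubelet's):

    // netgate.go: illustrative version of "skip pod sync while the pod
    // network is not ready, unless the pod runs on the host network".
    package main

    import (
        "errors"
        "fmt"
    )

    type pod struct {
        name        string
        hostNetwork bool
    }

    func syncPod(p pod, networkReady bool) error {
        if !networkReady && !p.hostNetwork {
            return errors.New("network is not ready: container runtime network not ready: NetworkReady=false")
        }
        // ...create the sandbox and start containers...
        return nil
    }

    func main() {
        ready := false // what the runtime is reporting in this log
        for _, p := range []pod{
            {name: "network-check-target-xd92c", hostNetwork: false},
            {name: "kube-apiserver-crc", hostNetwork: true},
        } {
            if err := syncPod(p, ready); err != nil {
                fmt.Printf("Error syncing pod %q, skipping: %v\n", p.name, err)
                continue
            }
            fmt.Printf("pod %q synced\n", p.name)
        }
    }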
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.765290 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:18 crc kubenswrapper[4603]: E0930 19:47:18.765382 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.765675 4603 scope.go:117] "RemoveContainer" containerID="9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.787616 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.788677 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.788733 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.788747 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.788766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 
19:47:18.788777 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.813304 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.832420 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 
2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.852664 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.870580 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.884211 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.892865 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.892896 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.892905 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.892918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.892926 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.900666 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.914495 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP
\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.933346 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate 
default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.947240 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.960558 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.976495 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994230 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994638 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994672 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994683 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994702 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:18 crc kubenswrapper[4603]: I0930 19:47:18.994717 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:18Z","lastTransitionTime":"2025-09-30T19:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.012690 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.029504 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.052811 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.093738 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.095392 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092"} Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096344 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096458 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096496 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096509 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096527 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.096539 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.110882 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.124989 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.141739 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.164430 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.177274 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.188182 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.199333 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.199397 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.199407 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.199423 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.199436 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.203125 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.213965 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.229779 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.244901 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.256352 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.272863 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.290231 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.302011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.302066 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.302078 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.302095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.302109 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.312143 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.328144 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.342485 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:19Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.404938 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.404990 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.405002 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.405018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.405029 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.507863 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.507924 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.507942 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.507966 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.507983 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.611093 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.611154 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.611202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.611227 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.611246 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.714220 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.714283 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.714302 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.714325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.714343 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.817071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.817112 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.817126 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.817141 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.817151 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.919897 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.919932 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.919941 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.919957 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:19 crc kubenswrapper[4603]: I0930 19:47:19.919966 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:19Z","lastTransitionTime":"2025-09-30T19:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.022930 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.022981 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.022996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.023025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.023047 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.125778 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.126105 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.126282 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.126418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.126560 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.230489 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.230562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.230616 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.230646 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.230667 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.334354 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.334434 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.334457 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.334490 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.334514 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.437277 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.437341 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.437358 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.437383 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.437401 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.541105 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.541238 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.541256 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.541281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.541299 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.644343 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.644390 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.644401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.644419 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.644431 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.748100 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.748228 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.748257 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.748291 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.748314 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.763527 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.763583 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:47:20 crc kubenswrapper[4603]: E0930 19:47:20.763732 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.763741 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.763803 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:20 crc kubenswrapper[4603]: E0930 19:47:20.763953 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:47:20 crc kubenswrapper[4603]: E0930 19:47:20.764077 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:47:20 crc kubenswrapper[4603]: E0930 19:47:20.764307 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.851239 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.851309 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.851327 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.851365 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.851384 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.954994 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.955078 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.955103 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.955133 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:20 crc kubenswrapper[4603]: I0930 19:47:20.955155 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:20Z","lastTransitionTime":"2025-09-30T19:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.058713 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.058774 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.058792 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.058817 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.058840 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.162111 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.162220 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.162266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.162306 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.162334 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.265773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.265826 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.265850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.265879 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.265899 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.369011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.369067 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.369078 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.369107 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.369120 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.472009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.472053 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.472065 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.472082 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.472095 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.574944 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.575023 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.575044 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.575077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.575100 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.678806 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.678879 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.678889 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.678911 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.678922 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.781703 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.781770 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.781787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.781811 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.781829 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.885233 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.885297 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.885315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.885339 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:21 crc kubenswrapper[4603]: I0930 19:47:21.885355 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:21Z","lastTransitionTime":"2025-09-30T19:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.035210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.035288 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.035303 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.035319 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.035333 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.144705 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.144773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.144797 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.144824 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.144847 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.247726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.247775 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.247785 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.247801 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.247812 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.350433 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.350475 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.350487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.350508 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.350524 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.453383 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.453446 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.453464 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.453492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.453509 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.556205 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.556275 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.556287 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.556313 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.556326 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.659497 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.659574 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.659597 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.659625 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.659648 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.713961 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.714211 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.714361 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:30.714325299 +0000 UTC m=+52.652784197 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.763309 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.763474 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.764119 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.764257 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.764330 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.764412 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.764580 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:22 crc kubenswrapper[4603]: E0930 19:47:22.764685 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.764980 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.765126 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.765146 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.765196 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.765246 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.868295 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.868366 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.868383 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.868408 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.868426 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.972127 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.972203 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.972241 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.972260 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:22 crc kubenswrapper[4603]: I0930 19:47:22.972273 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:22Z","lastTransitionTime":"2025-09-30T19:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.074745 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.074803 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.074815 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.074837 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.074852 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.178076 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.178150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.178179 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.178195 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.178209 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.280562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.280640 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.280652 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.280669 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.280685 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.383334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.383384 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.383400 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.383423 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.383437 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.486751 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.487057 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.487219 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.487323 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.487436 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.589995 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.590067 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.590085 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.590111 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.590128 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.692927 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.693023 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.693049 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.693082 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.693107 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.796389 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.796490 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.796506 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.796529 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.796543 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.899898 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.899984 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.900018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.900048 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:23 crc kubenswrapper[4603]: I0930 19:47:23.900065 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:23Z","lastTransitionTime":"2025-09-30T19:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.002823 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.002875 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.002890 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.002915 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.002933 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.105930 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.105988 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.106007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.106031 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.106048 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.210059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.210145 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.210202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.210235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.210258 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.313001 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.313073 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.313091 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.313120 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.313139 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.416768 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.416824 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.416842 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.416866 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.416884 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.519589 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.519659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.519680 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.519707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.519731 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.622695 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.622772 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.622796 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.622827 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.622850 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.725251 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.725319 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.725330 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.725344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.725355 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.764222 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:47:24 crc kubenswrapper[4603]: E0930 19:47:24.764363 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.764367 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.764410 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.764387 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:24 crc kubenswrapper[4603]: E0930 19:47:24.764558 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:47:24 crc kubenswrapper[4603]: E0930 19:47:24.764637 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:24 crc kubenswrapper[4603]: E0930 19:47:24.764703 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.827996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.828041 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.828051 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.828069 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.828080 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.930850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.931283 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.931478 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.931660 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:24 crc kubenswrapper[4603]: I0930 19:47:24.931830 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:24Z","lastTransitionTime":"2025-09-30T19:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.035350 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.035420 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.035439 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.035461 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.035478 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.138502 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.138910 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.139140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.139377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.139549 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.241785 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.242128 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.242356 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.242539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.242685 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.345754 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.345835 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.345855 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.345883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.345902 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.448563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.448626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.448644 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.448668 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.448684 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.552543 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.552609 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.552626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.552651 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.552671 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.655856 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.655920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.655937 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.655961 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.655978 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.758862 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.758922 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.758931 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.758953 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.758963 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.862437 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.862490 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.862501 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.862520 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.862533 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.870604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.870631 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.870640 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.870653 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.870663 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.891649 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:25Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.897259 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.897325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.897334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.897360 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.897373 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.914942 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:25Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.920331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.920392 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.920401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.920424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.920442 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.941826 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:25Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.947114 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.947230 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.947246 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.947274 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.947287 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.965205 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:25Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.970601 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.970654 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.970665 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.970686 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.970699 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.991485 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:25Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:25 crc kubenswrapper[4603]: E0930 19:47:25.991621 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.993988 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.994026 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.994039 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.994063 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:25 crc kubenswrapper[4603]: I0930 19:47:25.994076 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:25Z","lastTransitionTime":"2025-09-30T19:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.097743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.097817 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.097843 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.097875 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.097898 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:26Z","lastTransitionTime":"2025-09-30T19:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.205480 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.205588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.205604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.205627 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.205644 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:26Z","lastTransitionTime":"2025-09-30T19:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.621601 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.621659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.621676 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.621698 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.621714 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:26Z","lastTransitionTime":"2025-09-30T19:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.725384 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.725464 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.725485 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.725511 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.725530 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:26Z","lastTransitionTime":"2025-09-30T19:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.764124 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:26 crc kubenswrapper[4603]: E0930 19:47:26.764699 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.764184 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:26 crc kubenswrapper[4603]: E0930 19:47:26.764907 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.764184 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:26 crc kubenswrapper[4603]: E0930 19:47:26.765126 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.764234 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:26 crc kubenswrapper[4603]: E0930 19:47:26.765327 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.828818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.828916 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.828936 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.828963 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:26 crc kubenswrapper[4603]: I0930 19:47:26.828979 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:26Z","lastTransitionTime":"2025-09-30T19:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.480519 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.480568 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.480584 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.480607 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.480625 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:28Z","lastTransitionTime":"2025-09-30T19:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.583344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.583403 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.583415 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.583436 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.583449 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:28Z","lastTransitionTime":"2025-09-30T19:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.686534 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.686604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.686620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.686642 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.686662 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:28Z","lastTransitionTime":"2025-09-30T19:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.765007 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:28 crc kubenswrapper[4603]: E0930 19:47:28.765214 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.765519 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:28 crc kubenswrapper[4603]: E0930 19:47:28.765583 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.765628 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:28 crc kubenswrapper[4603]: E0930 19:47:28.765680 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.765719 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:28 crc kubenswrapper[4603]: E0930 19:47:28.765774 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.790472 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.792223 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.792967 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.793352 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.793614 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.794132 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:28Z","lastTransitionTime":"2025-09-30T19:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.808619 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.827923 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.850906 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe
51e557338f3a98f0033389b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.866506 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.884509 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.898883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.898944 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.898960 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.898987 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.899004 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:28Z","lastTransitionTime":"2025-09-30T19:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.903003 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.917379 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.936858 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.951801 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.970734 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:28 crc kubenswrapper[4603]: I0930 19:47:28.985961 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:28Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.000860 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.000959 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.000972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.000989 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.001001 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.005054 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:29Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.026951 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:29Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.047620 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:29Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.073220 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:29Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.103098 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.103132 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.103140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.103152 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.103176 4603 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.206610 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.206675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.206691 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.206718 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.206736 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.310018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.310094 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.310208 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.310238 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.310257 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.412989 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.413057 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.413095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.413119 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.413136 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.515845 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.515943 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.515960 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.515983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.516001 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.618847 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.618909 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.618926 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.618949 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.618966 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.722466 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.722521 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.722538 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.722560 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.722576 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.764873 4603 scope.go:117] "RemoveContainer" containerID="ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.826141 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.826186 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.826196 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.826211 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.826221 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.930584 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.931027 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.931047 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.931070 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:29 crc kubenswrapper[4603]: I0930 19:47:29.931088 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:29Z","lastTransitionTime":"2025-09-30T19:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.033891 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.033963 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.033980 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.034005 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.034024 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.136287 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.136311 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.136319 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.136334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.136343 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.239623 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.239970 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.240280 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.240589 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.241247 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.344404 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.344430 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.344438 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.344452 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.344462 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.446505 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.446575 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.446594 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.446618 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.446634 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.502934 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.503200 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:48:02.503181542 +0000 UTC m=+84.441640360 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.548403 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.548427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.548438 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.548450 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.548458 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.603504 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.603543 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.603564 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.603665 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.603678 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.603687 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.603726 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:48:02.603713642 +0000 UTC m=+84.542172460 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.604026 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.604053 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-30 19:48:02.604045222 +0000 UTC m=+84.542504040 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.604146 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.604281 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:48:02.604256477 +0000 UTC m=+84.542715295 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.650523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.650588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.650607 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.650631 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.650649 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.705041 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.705333 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.705376 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.705401 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.705487 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:48:02.705459417 +0000 UTC m=+84.643918275 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.753468 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.753527 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.753538 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.753561 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.753577 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.763873 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.763927 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.763915 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.763840 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.764148 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.764231 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.764294 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.764436 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.806330 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.806608 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: E0930 19:47:30.806779 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:47:46.806751819 +0000 UTC m=+68.745210677 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.856385 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.856475 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.856495 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.856521 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.856569 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.959126 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.959200 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.959217 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.959242 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:30 crc kubenswrapper[4603]: I0930 19:47:30.959259 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:30Z","lastTransitionTime":"2025-09-30T19:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.061975 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.062238 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.062250 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.062270 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.062282 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.143810 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/1.log"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.148109 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2"}
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.148511 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.165971 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.166029 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.166048 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.166072 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.166089 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.169466 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.185295 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.211784 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.228977 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.243408 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.260280 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.269483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.269539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.269551 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.269569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.269583 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.274461 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.296476 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.318483 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.340059 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.353874 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.371554 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.372659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.372883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.373037 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.373226 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.373408 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.389508 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.406266 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.429744 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c7
7f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped 
ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.442520 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:31Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.476871 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.476995 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.477014 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.477037 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.477056 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.580529 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.580596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.580608 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.580645 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.580659 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.684370 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.684445 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.684471 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.684503 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.684544 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.787744 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.787805 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.787827 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.788011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.788045 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.891594 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.891669 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.891717 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.891743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.891756 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.994572 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.994604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.994612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.994625 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:31 crc kubenswrapper[4603]: I0930 19:47:31.994634 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:31Z","lastTransitionTime":"2025-09-30T19:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.097022 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.097075 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.097091 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.097121 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.097139 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.154088 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/2.log" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.154990 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/1.log" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.158622 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" exitCode=1 Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.158670 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.158713 4603 scope.go:117] "RemoveContainer" containerID="ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.159751 4603 scope.go:117] "RemoveContainer" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" Sep 30 19:47:32 crc kubenswrapper[4603]: E0930 19:47:32.160021 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.179527 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.200469 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.200633 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.201157 4603 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.201266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.201347 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.201371 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.220440 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.240195 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.252631 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.267147 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.287692 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.301723 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.304071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.304140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.304216 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.304254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.304276 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.328344 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5a956d5ee9bca7c5fc0a4d9dd40984bc7ab6fe51e557338f3a98f0033389b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 19:47:11.979880 5939 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 19:47:11.979931 5939 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 19:47:11.979978 5939 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 19:47:11.979979 5939 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 19:47:11.979993 5939 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 19:47:11.980134 5939 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 19:47:11.980135 5939 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 19:47:11.980206 5939 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 19:47:11.980211 5939 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 19:47:11.980223 5939 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 19:47:11.980219 5939 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 19:47:11.980237 5939 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 19:47:11.980007 5939 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 19:47:11.980261 5939 factory.go:656] Stopping watch factory\\\\nI0930 19:47:11.980289 5939 ovnkube.go:599] Stopped 
ovnkube\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.339562 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.352751 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.365646 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.379450 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.392516 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.405575 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.406967 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.407025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.407038 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.407056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.407070 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.416711 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:32Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.509479 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.509525 4603 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.509536 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.509555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.509566 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.611706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.611739 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.611747 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.611761 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.611769 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.714695 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.714786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.714809 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.714841 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.714864 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.763888 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.763980 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.763910 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:32 crc kubenswrapper[4603]: E0930 19:47:32.764129 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.764208 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:32 crc kubenswrapper[4603]: E0930 19:47:32.764375 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:32 crc kubenswrapper[4603]: E0930 19:47:32.765011 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:32 crc kubenswrapper[4603]: E0930 19:47:32.764917 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.817737 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.817787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.817795 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.817811 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.817821 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.921331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.921391 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.921409 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.921478 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:32 crc kubenswrapper[4603]: I0930 19:47:32.921506 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:32Z","lastTransitionTime":"2025-09-30T19:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.024361 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.024401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.024409 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.024421 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.024430 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.127124 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.127231 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.127245 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.127263 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.127275 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.165102 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/2.log" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.168871 4603 scope.go:117] "RemoveContainer" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" Sep 30 19:47:33 crc kubenswrapper[4603]: E0930 19:47:33.169064 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.183911 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.196592 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.210890 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.223084 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.229619 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.229661 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.229670 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.229730 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.229751 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.239872 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.252949 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.266783 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.280928 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.296456 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.307962 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.319849 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.331586 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.331634 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.331647 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.331663 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.331676 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.340073 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.351067 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.362998 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.376511 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.391579 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:33Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.433590 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.433620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.433628 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.433641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.433650 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.535635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.535669 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.535678 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.535691 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.535699 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.638492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.638537 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.638554 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.638573 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.638588 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.741555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.742195 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.742294 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.742382 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.742462 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.844607 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.845016 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.845150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.845340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.845480 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.948451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.948501 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.948510 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.948524 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:33 crc kubenswrapper[4603]: I0930 19:47:33.948534 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:33Z","lastTransitionTime":"2025-09-30T19:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.051616 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.051671 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.051688 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.051713 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.051731 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.155821 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.155871 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.155887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.155908 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.155925 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.258241 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.258276 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.258284 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.258297 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.258305 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.360725 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.361229 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.361391 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.361598 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.361749 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.464564 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.464608 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.464617 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.464633 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.464644 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.567517 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.567850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.567943 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.568268 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.568501 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.670479 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.670744 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.670936 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.671142 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.671380 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.763907 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:47:34 crc kubenswrapper[4603]: E0930 19:47:34.764966 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.763969 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:47:34 crc kubenswrapper[4603]: E0930 19:47:34.765230 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.763968 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:47:34 crc kubenswrapper[4603]: E0930 19:47:34.765495 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.764353 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:47:34 crc kubenswrapper[4603]: E0930 19:47:34.765733 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.773729 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.773765 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.773777 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.773791 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.773801 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.876155 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.876497 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.876672 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.876779 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.876873 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.980070 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.980141 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.980192 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.980228 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:34 crc kubenswrapper[4603]: I0930 19:47:34.980249 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:34Z","lastTransitionTime":"2025-09-30T19:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.083799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.084209 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.084449 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.084713 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.084999 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.187585 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.187648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.187671 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.187703 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.187726 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.290793 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.290835 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.290845 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.290892 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.290903 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.393637 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.393719 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.393742 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.393770 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.393794 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.415605 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.432854 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.438664 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.458230 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.487442 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.496300 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.496371 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.496395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.496424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.496447 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.501661 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.517398 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.537028 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.552788 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.568424 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.583837 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.597827 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.598696 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.598758 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.598774 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.598796 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.598818 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.613957 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.635857 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc76
04148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.652035 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.670288 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.686659 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.701344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.701623 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.701732 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.701834 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.701921 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.706660 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:35Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.804727 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.804823 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.804840 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.804860 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.804876 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.907315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.907382 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.907402 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.907427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:35 crc kubenswrapper[4603]: I0930 19:47:35.907444 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:35Z","lastTransitionTime":"2025-09-30T19:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.010969 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.011380 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.011570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.011760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.011978 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.060136 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.060243 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.060261 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.060286 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.060304 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.080043 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.084400 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.084438 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.084449 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.084465 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.084478 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.102985 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.108011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.108065 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.108076 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.108093 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.108115 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.129292 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload identical to the first attempt above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.133544 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.133586 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.133596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.133613 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.133626 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.148706 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload identical to the first attempt above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.152667 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.152707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.152718 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.152736 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.152748 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.167093 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload identical to the first attempt above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:36Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.167291 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.169015 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.169111 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.169125 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.169140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.169185 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.271526 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.271625 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.271642 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.271667 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.271685 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.374426 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.374476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.374487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.374505 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.374521 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.477601 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.477670 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.477686 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.477710 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.477726 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.580701 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.580762 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.580779 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.580803 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.580819 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.683583 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.683641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.683662 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.683688 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.683708 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.763711 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.763804 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.763886 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.763895 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.763709 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.763990 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.764143 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:36 crc kubenswrapper[4603]: E0930 19:47:36.764304 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.786854 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.786958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.786978 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.787043 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.787063 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.890107 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.890205 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.890218 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.890271 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.890286 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.994491 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.994580 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.994597 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.994620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:36 crc kubenswrapper[4603]: I0930 19:47:36.994641 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:36Z","lastTransitionTime":"2025-09-30T19:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-record status block repeats roughly every 100 ms from 19:47:37.098 through 19:47:38.337, with only the timestamps changing]
Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.439873 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.439925 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.439936 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.439951 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.439964 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.489636 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.510856 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.530205 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.543613 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.543682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.543702 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.543729 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.543748 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.552118 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.570042 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.590451 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.607075 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.624397 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.637150 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.646041 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.646096 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.646106 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.646121 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.646454 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.657245 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.673867 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.696871 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.714612 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.744289 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c7
7f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.749703 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.749743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.749753 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.749771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.749783 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.759058 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.764329 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.764352 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.764358 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:38 crc kubenswrapper[4603]: E0930 19:47:38.764496 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.764526 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:38 crc kubenswrapper[4603]: E0930 19:47:38.764666 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:38 crc kubenswrapper[4603]: E0930 19:47:38.764751 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:38 crc kubenswrapper[4603]: E0930 19:47:38.764818 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.773471 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.793753 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.810878 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.827232 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.843324 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.852657 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.852722 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.852743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.852768 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.852786 4603 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.865046 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.880549 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.897055 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.926109 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c7
7f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.938848 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.953270 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.957295 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.957348 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.957361 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.957380 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.957394 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:38Z","lastTransitionTime":"2025-09-30T19:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.971845 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:38 crc kubenswrapper[4603]: I0930 19:47:38.988687 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:38Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.008579 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.025726 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.045524 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.060891 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.061252 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.061274 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.061292 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.061304 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.060900 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.090247 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 
2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.115589 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.133885 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:39Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.163888 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.163955 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.163971 4603 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.163995 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.164015 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.266644 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.266682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.266693 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.266707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.266716 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.369828 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.369890 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.369909 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.369933 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.369950 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.472958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.473019 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.473038 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.473061 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.473079 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.575849 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.575924 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.575943 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.575966 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.575983 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.679207 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.679264 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.679281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.679303 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.679319 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.782145 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.782262 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.782279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.782302 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.782320 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.885633 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.885694 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.885706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.885722 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.885732 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.989018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.989071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.989088 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.989111 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:39 crc kubenswrapper[4603]: I0930 19:47:39.989128 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:39Z","lastTransitionTime":"2025-09-30T19:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.091085 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.091128 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.091138 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.091154 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.091196 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.193224 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.193263 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.193275 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.193289 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.193301 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.296071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.296131 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.296143 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.296185 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.296203 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.398594 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.398662 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.398671 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.398688 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.398699 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.501942 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.502021 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.502044 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.502080 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.502101 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.604693 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.604767 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.604787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.604813 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.604834 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.708961 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.709025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.709042 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.709080 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.709099 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.763970 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.764114 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:40 crc kubenswrapper[4603]: E0930 19:47:40.764153 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:40 crc kubenswrapper[4603]: E0930 19:47:40.764351 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.764227 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.764315 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:40 crc kubenswrapper[4603]: E0930 19:47:40.764473 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:40 crc kubenswrapper[4603]: E0930 19:47:40.764548 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.811296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.811354 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.811368 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.811395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.811415 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.913789 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.913833 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.913844 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.913886 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:40 crc kubenswrapper[4603]: I0930 19:47:40.913899 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:40Z","lastTransitionTime":"2025-09-30T19:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.016398 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.016456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.016473 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.016496 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.016514 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.118841 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.118936 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.118951 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.118973 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.118989 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.220797 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.220829 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.220837 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.220853 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.220863 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.322927 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.322986 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.322996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.323014 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.323025 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.425508 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.425564 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.425581 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.425603 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.425622 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.529584 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.530302 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.530722 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.531063 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.531436 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.634118 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.634401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.634483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.634559 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.634624 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.737791 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.738060 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.738230 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.738416 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.738500 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.840589 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.840962 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.841042 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.841113 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.841212 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.945521 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.946000 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.946340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.946495 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:41 crc kubenswrapper[4603]: I0930 19:47:41.946619 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:41Z","lastTransitionTime":"2025-09-30T19:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.050939 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.051020 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.051037 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.051061 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.051077 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.154470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.154571 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.154630 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.154655 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.154711 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.257737 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.258266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.259043 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.259601 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.259985 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.364114 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.364187 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.364202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.364219 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.364231 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.466652 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.466704 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.466722 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.466743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.466760 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.570267 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.570310 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.570321 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.570340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.570352 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.673038 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.673071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.673079 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.673092 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.673102 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.763984 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.764069 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.763996 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.764201 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:42 crc kubenswrapper[4603]: E0930 19:47:42.764262 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:42 crc kubenswrapper[4603]: E0930 19:47:42.764337 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:42 crc kubenswrapper[4603]: E0930 19:47:42.764512 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:42 crc kubenswrapper[4603]: E0930 19:47:42.764596 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.775279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.775315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.775327 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.775344 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.775357 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.877561 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.877596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.877605 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.877616 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.877625 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.980223 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.980266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.980277 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.980294 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:42 crc kubenswrapper[4603]: I0930 19:47:42.980311 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:42Z","lastTransitionTime":"2025-09-30T19:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.083516 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.083576 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.083596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.083617 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.083633 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.185740 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.185778 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.185786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.185800 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.185813 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.287768 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.288028 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.288120 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.288214 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.288279 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.391566 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.391612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.391631 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.391658 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.391676 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.494286 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.494377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.494401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.494447 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.494470 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.597381 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.597439 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.597457 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.597480 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.597501 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.700351 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.700780 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.701144 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.701383 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.701537 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.804338 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.804384 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.804394 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.804410 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.804420 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.906496 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.906534 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.906546 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.906563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:43 crc kubenswrapper[4603]: I0930 19:47:43.906573 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:43Z","lastTransitionTime":"2025-09-30T19:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.008584 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.008659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.008675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.008699 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.008717 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.111580 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.111645 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.111662 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.111717 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.111739 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.213535 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.213570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.213578 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.213591 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.213599 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.316945 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.316972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.316982 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.316993 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.317016 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.419298 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.419349 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.419361 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.419381 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.419394 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.521279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.521312 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.521321 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.521336 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.521346 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.623321 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.623348 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.623356 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.623367 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.623375 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.725275 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.725303 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.725312 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.725323 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.725330 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.764245 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:44 crc kubenswrapper[4603]: E0930 19:47:44.764555 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.764252 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.764246 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:44 crc kubenswrapper[4603]: E0930 19:47:44.764620 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.764284 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:44 crc kubenswrapper[4603]: E0930 19:47:44.764665 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:44 crc kubenswrapper[4603]: E0930 19:47:44.764699 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.827891 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.827982 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.828000 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.828024 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.828044 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.930254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.930575 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.930645 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.930714 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:44 crc kubenswrapper[4603]: I0930 19:47:44.930773 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:44Z","lastTransitionTime":"2025-09-30T19:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.033134 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.033627 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.033851 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.034045 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.034243 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.137696 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.138595 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.138767 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.138938 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.139106 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.243234 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.243292 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.243309 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.243332 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.243348 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.346473 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.346513 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.346524 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.346537 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.346547 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.448804 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.449095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.449336 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.449708 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.449877 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.552026 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.552339 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.552449 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.552528 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.552590 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.654299 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.654331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.654339 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.654352 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.654361 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.756721 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.756762 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.756773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.756790 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.756802 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.859389 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.859433 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.859442 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.859456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.859465 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.961486 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.961523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.961531 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.961546 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:45 crc kubenswrapper[4603]: I0930 19:47:45.961557 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:45Z","lastTransitionTime":"2025-09-30T19:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.064552 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.064932 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.065089 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.065257 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.065402 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.167823 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.167879 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.167895 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.167919 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.167938 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.270191 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.270222 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.270230 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.270245 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.270255 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.372991 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.373029 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.373041 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.373059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.373072 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.470825 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.471146 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.471253 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.471325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.471399 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.491931 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:46Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.495570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.495679 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.495760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.495825 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.495906 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.518067 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.518116 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.518130 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.518150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.518192 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.535768 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.535948 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.536009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.536074 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.536133 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.559239 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.559300 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.559315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.559331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.559342 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.570322 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.571939 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.572182 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.572277 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.572362 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.572443 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.675408 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.675443 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.675456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.675470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.675481 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.766404 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.766468 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.766669 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.766505 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.766778 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.766868 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.766929 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.767018 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.777593 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.777646 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.777664 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.777685 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.777707 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.874581 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.874726 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:46 crc kubenswrapper[4603]: E0930 19:47:46.874775 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. 
No retries permitted until 2025-09-30 19:48:18.874761227 +0000 UTC m=+100.813220035 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.880023 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.880076 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.880093 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.880118 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.880136 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.982610 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.982674 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.982691 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.982715 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:46 crc kubenswrapper[4603]: I0930 19:47:46.982732 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:46Z","lastTransitionTime":"2025-09-30T19:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.085088 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.085147 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.085190 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.085214 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.085232 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.188431 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.188489 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.188506 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.188530 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.188547 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.291533 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.291586 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.291602 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.291625 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.291642 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.394411 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.394510 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.394529 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.394613 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.394685 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.497458 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.497507 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.497523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.497544 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.497562 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.599928 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.599983 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.600005 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.600043 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.600065 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.702574 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.702648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.702666 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.702690 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.702706 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.804898 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.804925 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.804933 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.804947 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.804957 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.907949 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.908011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.908028 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.908050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:47 crc kubenswrapper[4603]: I0930 19:47:47.908068 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:47Z","lastTransitionTime":"2025-09-30T19:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.010425 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.010826 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.010926 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.011024 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.011124 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.114567 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.114850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.114922 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.114998 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.115060 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.216228 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/0.log" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.216284 4603 generic.go:334] "Generic (PLEG): container finished" podID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" containerID="bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b" exitCode=1 Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.216316 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerDied","Data":"bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.216729 4603 scope.go:117] "RemoveContainer" containerID="bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.217059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.217250 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.217331 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.217443 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.217588 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.231427 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.247152 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.260917 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.277156 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.291184 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin
\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.305440 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.320603 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.321079 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.321120 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.321132 4603 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.321148 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.321174 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.331500 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.348192 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.359193 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.375478 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.390471 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.412927 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c7
7f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.423042 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.424092 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.424126 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.424136 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.424154 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.424216 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.434144 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.448787 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.460328 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.526787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.526825 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.526833 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.526845 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.526854 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.633269 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.633311 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.633322 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.633336 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.633346 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.735181 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.735210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.735221 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.735233 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.735240 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.763946 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:48 crc kubenswrapper[4603]: E0930 19:47:48.764110 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.764232 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:48 crc kubenswrapper[4603]: E0930 19:47:48.764295 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.764335 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:48 crc kubenswrapper[4603]: E0930 19:47:48.764373 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.764744 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.764994 4603 scope.go:117] "RemoveContainer" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" Sep 30 19:47:48 crc kubenswrapper[4603]: E0930 19:47:48.764991 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:48 crc kubenswrapper[4603]: E0930 19:47:48.765147 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.779071 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.799473 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.814738 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.828833 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.838980 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.839415 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.839569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.839863 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.840005 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.843569 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.875370 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c7
7f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.892260 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.903810 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.916262 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.925999 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.937570 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.942819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.942849 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.942858 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.942871 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.942881 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:48Z","lastTransitionTime":"2025-09-30T19:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.947738 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.962083 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.975867 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:48 crc kubenswrapper[4603]: I0930 19:47:48.991907 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:48Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.003842 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.016425 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.045337 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.045395 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.045405 4603 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.045418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.045428 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.147366 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.147418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.147434 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.147460 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.147476 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.223130 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/0.log" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.223198 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerStarted","Data":"6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.238262 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.250840 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.250900 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.250921 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.250944 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.250963 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.258560 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.272811 4603 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.283956 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.292894 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.306568 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.321239 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.332010 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.343749 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.353104 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.353142 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.353152 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.353216 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.353233 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.364218 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.377437 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.389223 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.404860 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.439560 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.455926 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.455953 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.455961 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.455975 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.455984 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.459792 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.479082 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.501946 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:49Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.558545 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.558576 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.558585 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.558598 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.558608 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.661771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.661841 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.661861 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.661886 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.661904 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.765147 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.765211 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.765224 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.765241 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.765262 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.867487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.867525 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.867537 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.867553 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.867565 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.970434 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.970472 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.970483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.970499 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:49 crc kubenswrapper[4603]: I0930 19:47:49.970511 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:49Z","lastTransitionTime":"2025-09-30T19:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.073801 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.073884 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.073900 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.073921 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.073938 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.176520 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.176565 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.176582 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.176604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.176620 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.279923 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.279985 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.280008 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.280040 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.280064 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.382702 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.382759 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.382777 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.382800 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.382817 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.485250 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.485287 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.485296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.485308 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.485316 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.587569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.587612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.587625 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.587641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.587652 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.690181 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.690224 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.690232 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.690247 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.690259 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.763729 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.763868 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.763918 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.763973 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:50 crc kubenswrapper[4603]: E0930 19:47:50.764047 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:50 crc kubenswrapper[4603]: E0930 19:47:50.764211 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:50 crc kubenswrapper[4603]: E0930 19:47:50.764329 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:50 crc kubenswrapper[4603]: E0930 19:47:50.764416 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.816512 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.816731 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.816811 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.816883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.816953 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.920321 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.920372 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.920388 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.920411 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:50 crc kubenswrapper[4603]: I0930 19:47:50.920427 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:50Z","lastTransitionTime":"2025-09-30T19:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.023732 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.023762 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.023770 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.023784 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.023792 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.127807 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.127859 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.127875 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.127896 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.127915 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.231295 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.231335 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.231351 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.231371 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.231387 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.334098 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.336418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.337283 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.337516 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.337538 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.440427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.440469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.440479 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.440493 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.440502 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.543228 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.543279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.543288 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.543303 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.543314 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.645680 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.645800 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.645818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.645835 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.645847 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.748846 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.748890 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.748901 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.748919 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.748930 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.852122 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.852212 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.852229 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.852254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.852270 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.955233 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.955286 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.955298 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.955315 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:51 crc kubenswrapper[4603]: I0930 19:47:51.955329 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:51Z","lastTransitionTime":"2025-09-30T19:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.057807 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.057846 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.057855 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.057870 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.057879 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.160638 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.160686 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.160698 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.160714 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.160730 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.263367 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.263428 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.263440 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.263456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.263465 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.366238 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.366272 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.366281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.366296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.366306 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.468340 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.468396 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.468414 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.468441 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.468459 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.570640 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.570693 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.570706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.570723 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.570758 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.673334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.673385 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.673402 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.673424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.673440 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.764221 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.764351 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.764409 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.764455 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:52 crc kubenswrapper[4603]: E0930 19:47:52.764363 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:52 crc kubenswrapper[4603]: E0930 19:47:52.764606 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:52 crc kubenswrapper[4603]: E0930 19:47:52.764640 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:52 crc kubenswrapper[4603]: E0930 19:47:52.764710 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.775473 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.775674 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.775749 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.775824 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.775902 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.878662 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.878698 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.878707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.878722 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.878732 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.980996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.981039 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.981051 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.981067 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:52 crc kubenswrapper[4603]: I0930 19:47:52.981078 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:52Z","lastTransitionTime":"2025-09-30T19:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.084292 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.084334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.084345 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.084360 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.084373 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.187448 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.187497 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.187513 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.187536 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.187555 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.290311 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.290347 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.290360 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.290377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.290388 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.393866 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.393913 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.393930 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.393952 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.393968 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.497386 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.497543 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.497569 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.497599 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.497622 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.600903 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.600940 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.600949 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.600962 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.600973 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.703129 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.703192 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.703202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.703213 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.703222 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.805863 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.805910 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.805928 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.805950 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.805967 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.908357 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.908424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.908441 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.908493 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:53 crc kubenswrapper[4603]: I0930 19:47:53.908511 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:53Z","lastTransitionTime":"2025-09-30T19:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.011318 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.011386 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.011405 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.011428 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.011447 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.114624 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.114704 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.114723 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.114749 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.114767 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.217938 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.218006 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.218023 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.218050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.218068 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.321116 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.321228 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.321247 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.321273 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.321291 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.423786 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.423889 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.423957 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.423991 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.424010 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.526989 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.527053 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.527074 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.527102 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.527124 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.630049 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.630114 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.630130 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.630156 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.630227 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.733296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.733364 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.733380 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.733404 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.733421 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.763850 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:54 crc kubenswrapper[4603]: E0930 19:47:54.764077 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.764291 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.764459 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:54 crc kubenswrapper[4603]: E0930 19:47:54.764674 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.764736 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:54 crc kubenswrapper[4603]: E0930 19:47:54.764941 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:54 crc kubenswrapper[4603]: E0930 19:47:54.764803 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.836857 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.836916 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.836932 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.836956 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.836973 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.940100 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.940418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.940475 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.940499 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:54 crc kubenswrapper[4603]: I0930 19:47:54.940804 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:54Z","lastTransitionTime":"2025-09-30T19:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.043596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.043649 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.043694 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.043716 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.043735 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.146054 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.146457 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.146676 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.146866 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.147041 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.249050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.249082 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.249091 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.249105 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.249115 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.351948 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.352012 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.352034 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.352059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.352075 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.455055 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.456021 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.456261 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.456423 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.456551 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.559058 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.559101 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.559113 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.559127 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.559136 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.661703 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.662261 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.662356 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.662445 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.662533 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.765438 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.765511 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.765523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.765539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.765551 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.868723 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.869049 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.869216 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.869355 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.869467 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.972638 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.973040 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.973216 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.973343 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:55 crc kubenswrapper[4603]: I0930 19:47:55.973455 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:55Z","lastTransitionTime":"2025-09-30T19:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.075709 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.075997 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.076112 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.076221 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.076373 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.178717 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.178756 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.178766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.178780 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.178813 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.281382 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.281453 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.281475 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.281503 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.281524 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.383814 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.384149 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.384352 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.384526 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.384659 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.487874 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.487932 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.487950 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.487973 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.487989 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.592094 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.592147 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.592203 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.592273 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.592293 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.695519 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.695801 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.695880 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.695980 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.696124 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.763355 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.763358 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.763456 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.763531 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.764270 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.764538 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.764683 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.764929 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.799202 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.799504 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.799624 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.799737 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.799819 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.846712 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.847093 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.847199 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.847285 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.847356 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.867883 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:56Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.873439 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.873512 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.873535 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.873566 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.873588 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.890606 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:56Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.896111 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.896159 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.896196 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.896211 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.896220 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.913064 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:56Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.917887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.917931 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.917943 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.917960 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.917971 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.930020 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:56Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.934612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.934665 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.934682 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.934705 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.934722 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.953368 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:56Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:56 crc kubenswrapper[4603]: E0930 19:47:56.953645 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.956281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.956586 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.956798 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.956999 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:56 crc kubenswrapper[4603]: I0930 19:47:56.957228 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:56Z","lastTransitionTime":"2025-09-30T19:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.060259 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.060301 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.060309 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.060338 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.060351 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.163198 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.163241 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.163252 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.163267 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.163277 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.265470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.265563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.265582 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.265604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.265621 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.368412 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.368473 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.368489 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.368513 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.368560 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.472469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.472530 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.472546 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.472597 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.472614 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.575553 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.575635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.575706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.575738 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.575762 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.678706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.678760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.678771 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.678789 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.678800 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.781796 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.781842 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.781851 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.781865 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.781874 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.884268 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.884329 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.884345 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.884368 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.884387 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.987819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.987878 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.987897 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.987921 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:57 crc kubenswrapper[4603]: I0930 19:47:57.987941 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:57Z","lastTransitionTime":"2025-09-30T19:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.090445 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.090496 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.090504 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.090517 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.090525 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.193266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.193336 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.193359 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.193388 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.193409 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.296799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.297235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.297254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.297279 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.297328 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.400131 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.400203 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.400215 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.400235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.400579 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.503452 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.503492 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.503502 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.503518 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.503530 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.606119 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.606199 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.606220 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.606243 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.606261 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.710086 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.710152 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.710204 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.710234 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.710254 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.763972 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.764099 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:47:58 crc kubenswrapper[4603]: E0930 19:47:58.764263 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.764297 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.764328 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:47:58 crc kubenswrapper[4603]: E0930 19:47:58.764588 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:47:58 crc kubenswrapper[4603]: E0930 19:47:58.764763 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:47:58 crc kubenswrapper[4603]: E0930 19:47:58.764860 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.781516 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.801036 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.814327 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.814375 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.814392 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.814417 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.814435 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.820868 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.841774 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.860005 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 
19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.886840 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.905437 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.916976 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.917050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.917079 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.917106 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.917156 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:58Z","lastTransitionTime":"2025-09-30T19:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.923426 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.951473 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.980515 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:58 crc kubenswrapper[4603]: I0930 19:47:58.993875 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:58Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.015791 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.021433 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.021470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc 
kubenswrapper[4603]: I0930 19:47:59.021479 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.021493 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.021502 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.036302 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.046872 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.058197 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.070495 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.087944 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:47:59Z is after 2025-08-24T17:21:41Z" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.172330 4603 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.172371 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.172387 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.172413 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.172430 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.274710 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.274763 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.274778 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.274799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.274816 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.377730 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.377781 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.377799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.377821 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.377840 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.480693 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.480763 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.480779 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.480804 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.480828 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.583728 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.583787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.583810 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.583839 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.583859 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.686377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.686413 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.686439 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.686453 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.686461 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.788726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.788784 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.788797 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.788811 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.788821 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.891206 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.891515 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.891598 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.891680 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.891755 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.994918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.994995 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.995005 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.995063 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:47:59 crc kubenswrapper[4603]: I0930 19:47:59.995077 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:47:59Z","lastTransitionTime":"2025-09-30T19:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.097542 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.097615 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.097626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.097643 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.097657 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.201258 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.201325 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.201345 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.201376 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.201398 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.304633 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.305064 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.305264 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.305425 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.305572 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.409133 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.409566 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.409785 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.409930 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.410052 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.513541 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.514051 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.514558 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.514758 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.514913 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.618269 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.618649 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.618818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.619011 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.619461 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.722397 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.723323 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.723485 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.723649 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.724419 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.765183 4603 scope.go:117] "RemoveContainer" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.765715 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.765774 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:00 crc kubenswrapper[4603]: E0930 19:48:00.766782 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.765853 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.765771 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:00 crc kubenswrapper[4603]: E0930 19:48:00.766952 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:00 crc kubenswrapper[4603]: E0930 19:48:00.766574 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:00 crc kubenswrapper[4603]: E0930 19:48:00.767052 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.831219 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.831245 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.831252 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.831264 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.831272 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.937534 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.937570 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.937590 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.937610 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:00 crc kubenswrapper[4603]: I0930 19:48:00.937621 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:00Z","lastTransitionTime":"2025-09-30T19:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.039773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.039810 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.039821 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.039836 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.039845 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.141766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.141810 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.141825 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.141843 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.141858 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.243727 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.243765 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.243775 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.243788 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.243799 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.276989 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/2.log" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.279355 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.280404 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.292694 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc3
5fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.303018 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.320227 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.333516 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.345689 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.345726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.345736 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.345751 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.345763 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.349549 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.364932 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.379251 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.392432 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.408081 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601
cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.421120 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.436356 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447538 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447796 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447805 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.447827 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.460617 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.472888 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.485034 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.498138 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.518540 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad7
7c31b1f0739850bc5dcdd574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:48:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.551213 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.551248 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.551257 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.551271 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.551281 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.653548 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.653620 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.653634 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.653653 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.653665 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.757149 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.757210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.757227 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.757245 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.757258 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.859821 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.859870 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.859888 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.859907 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.859921 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.962664 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.962726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.962751 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.962777 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:01 crc kubenswrapper[4603]: I0930 19:48:01.962799 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:01Z","lastTransitionTime":"2025-09-30T19:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.066057 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.066098 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.066110 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.066124 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.066134 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.168911 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.168965 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.168982 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.169008 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.169024 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.272407 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.272469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.272485 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.272510 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.272527 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.285426 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/3.log" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.286672 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/2.log" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.291634 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" exitCode=1 Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.291702 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.291746 4603 scope.go:117] "RemoveContainer" containerID="02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.292828 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.293157 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.322004 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.341302 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.356560 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.376596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.376738 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.376766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.376856 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.376928 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.377517 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.394993 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.421033 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.440880 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.455208 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.469466 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.479814 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.479849 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.479857 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.479872 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.479881 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.483761 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.497355 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.516567 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.545841 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad7
7c31b1f0739850bc5dcdd574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02fcc8503e270b17d6afbbfa2c6e95525fef15c77f916d0a66f865201cc7cef2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:31Z\\\",\\\"message\\\":\\\"ne-api-operator-webhook_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/machine-api-operator-webhook\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.254\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 19:47:31.110230 6165 services_controller.go:452] Built service openshift-machine-api/machine-api-operator-webhook per-node LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110245 6165 services_controller.go:453] Built service openshift-machine-api/machine-api-operator-webhook template LB for network=default: []services.LB{}\\\\nI0930 19:47:31.110258 6165 services_controller.go:454] Service openshift-machine-api/machine-api-operator-webhook for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0930 19:47:31.109588 6165 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:48:01Z\\\",\\\"message\\\":\\\"30 19:48:01.587515 6548 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0930 19:48:01.587427 6548 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-gn9pm\\\\nI0930 19:48:01.587524 6548 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-gn9pm in node crc\\\\nI0930 19:48:01.587529 6548 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-gn9pm after 0 failed attempt(s)\\\\nF0930 19:48:01.587330 6548 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:48:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.549950 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.550158 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.550117389 +0000 UTC m=+148.488576247 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.560047 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.576521 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 
19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.583010 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.583085 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.583103 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.583125 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.583144 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.596552 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.610934 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:02Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.651762 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:02 crc 
kubenswrapper[4603]: I0930 19:48:02.651825 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.651918 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652021 4603 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652029 4603 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652093 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.652071586 +0000 UTC m=+148.590530434 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652147 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.652118688 +0000 UTC m=+148.590577566 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652287 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652309 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652328 4603 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.652383 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.652360684 +0000 UTC m=+148.590819622 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.686267 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.686305 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.686316 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.686332 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.686343 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.755789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.755973 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.756140 4603 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.756203 4603 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.756302 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.756274317 +0000 UTC m=+148.694733175 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.763599 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.763733 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.763949 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.764308 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.764362 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.764479 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.764651 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:02 crc kubenswrapper[4603]: E0930 19:48:02.764898 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.779397 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.788901 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.788989 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.789007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.789031 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.789050 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.891762 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.891813 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.891824 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.891839 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.891852 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.994482 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.994539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.994550 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.994566 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:02 crc kubenswrapper[4603]: I0930 19:48:02.994577 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:02Z","lastTransitionTime":"2025-09-30T19:48:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.098082 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.098396 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.098487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.098596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.098694 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.207110 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.207155 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.207185 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.207204 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.207216 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.296047 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/3.log" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.300906 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:48:03 crc kubenswrapper[4603]: E0930 19:48:03.301442 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.310039 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.310358 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.310985 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.311410 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.311765 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.319136 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.335104 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.351540 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.369056 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.398912 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad7
7c31b1f0739850bc5dcdd574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:48:01Z\\\",\\\"message\\\":\\\"30 19:48:01.587515 6548 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0930 19:48:01.587427 6548 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-gn9pm\\\\nI0930 19:48:01.587524 6548 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-gn9pm in node crc\\\\nI0930 19:48:01.587529 6548 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-gn9pm after 0 failed attempt(s)\\\\nF0930 19:48:01.587330 6548 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:48:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.415483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.415918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.416106 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.415547 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.416288 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.416580 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.434757 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.449377 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff270b4-4b95-47ee-96a1-1916c7a2b17a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fa33729df39dac39eb5ac092f4dcce8214c97279f6d6828ba2ba90051d77ed6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.469578 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.490346 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.507696 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.518693 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.518753 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.518768 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.518789 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.518806 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.521139 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.537578 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.555157 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.578330 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.598708 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.622352 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.622427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.622453 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.622483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.622505 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.624624 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.637254 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:03Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.725468 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.725541 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.725562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.725587 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:03 crc kubenswrapper[4603]: I0930 19:48:03.725605 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:03Z","lastTransitionTime":"2025-09-30T19:48:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
[... the same five-record sequence (four "Recording event message for node" records for NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, and NodeNotReady, then a setters.go:603 "Node became not ready" record with the identical KubeletNotReady / no-CNI-configuration condition) repeats at 19:48:03.828, 19:48:03.931, 19:48:04.037, 19:48:04.141, 19:48:04.244, 19:48:04.348, 19:48:04.450, 19:48:04.553, 19:48:04.655, and 19:48:04.758 ...]
Sep 30 19:48:04 crc kubenswrapper[4603]: I0930 19:48:04.763885 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:04 crc kubenswrapper[4603]: I0930 19:48:04.763938 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:04 crc kubenswrapper[4603]: E0930 19:48:04.764041 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:04 crc kubenswrapper[4603]: I0930 19:48:04.764120 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:04 crc kubenswrapper[4603]: I0930 19:48:04.764145 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:04 crc kubenswrapper[4603]: E0930 19:48:04.764256 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:04 crc kubenswrapper[4603]: E0930 19:48:04.764335 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:04 crc kubenswrapper[4603]: E0930 19:48:04.764439 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
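Every pod-sync failure in this stretch names the same root cause: no CNI network configuration exists under /etc/kubernetes/cni/net.d/. A small Go sketch approximating that readiness check, assuming the conventional *.conf/*.conflist/*.json config naming; this is an illustration of the check, not the kubelet's actual CNI code:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the kubelet message, scanned for the usual
	// CNI configuration file extensions.
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err != nil {
			continue // the only possible error is a malformed pattern
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		// This is the state the log shows: NetworkReady stays false.
		fmt.Printf("no CNI configuration file in %s/. Has your network provider started?\n", dir)
		os.Exit(1)
	}
	fmt.Println("CNI config candidates:", found)
}

Until the network operator writes a config file into that directory, the kubelet keeps reporting NetworkReady=false and refuses to create new pod sandboxes, which is exactly the loop recorded here.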
[... the five-record node-status sequence continues to repeat roughly every 100 ms from 19:48:04.861 through 19:48:06.726, each time with the unchanged KubeletNotReady / "no CNI configuration file in /etc/kubernetes/cni/net.d/" condition ...]
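The condition object that setters.go prints on each pass has the shape of a core/v1 NodeCondition. A short Go sketch decoding one logged payload; the struct fields mirror the JSON keys in the log, with timestamps kept as plain strings so the sketch stays dependency-free, and the message shortened:

package main

import (
	"encoding/json"
	"fmt"
)

// NodeCondition mirrors the keys of the condition objects in the log
// (the same shape as core/v1 NodeCondition).
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied (message shortened) from a "Node became not ready" record above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:05Z","lastTransitionTime":"2025-09-30T19:48:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("decode:", err)
		return
	}
	fmt.Printf("%s=%s reason=%s since=%s\n", c.Type, c.Status, c.Reason, c.LastTransitionTime)
}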
Sep 30 19:48:06 crc kubenswrapper[4603]: I0930 19:48:06.766583 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:06 crc kubenswrapper[4603]: I0930 19:48:06.767046 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:06 crc kubenswrapper[4603]: E0930 19:48:06.767239 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:06 crc kubenswrapper[4603]: I0930 19:48:06.767293 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:06 crc kubenswrapper[4603]: I0930 19:48:06.767335 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:06 crc kubenswrapper[4603]: E0930 19:48:06.766889 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:48:06 crc kubenswrapper[4603]: E0930 19:48:06.767485 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:06 crc kubenswrapper[4603]: E0930 19:48:06.767577 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:06 crc kubenswrapper[4603]: I0930 19:48:06.782107 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
[... the five-record node-status sequence repeats at 19:48:06.830, 19:48:06.933, 19:48:07.036, 19:48:07.140, 19:48:07.243, 19:48:07.346, and 19:48:07.354 ...]
Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.376882 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[... 50 image references with repository digests and sizeBytes values, from 2887430265 down to 448887027 bytes ...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:07Z is after 2025-08-24T17:21:41Z"
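The failing node-status update is a strategic-merge patch (hence the $setElementOrder/conditions directive), and it dies on the same expired certificate as the pod-status patch earlier. Plain time arithmetic on the two RFC 3339 stamps from the x509 error shows how stale the certificate is, roughly 37 days; a minimal Go sketch:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Both timestamps are copied from the x509 error in the log.
	notAfter, err := time.Parse(time.RFC3339, "2025-08-24T17:21:41Z")
	if err != nil {
		panic(err)
	}
	current, err := time.Parse(time.RFC3339, "2025-09-30T19:48:07Z")
	if err != nil {
		panic(err)
	}
	past := current.Sub(notAfter)
	fmt.Printf("certificate expired %s (%.1f days) before the request\n",
		past, past.Hours()/24)
}

Until that certificate is rotated, every status patch fails and is retried, which is why the same payload reappears immediately below.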
event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.381896 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.381920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.381937 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.402303 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.407084 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.407140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.407156 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.407213 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.407235 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.426158 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.431050 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.431110 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.431123 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.431142 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.431154 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.448144 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.452965 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.453019 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.453035 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.453059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.453078 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.473069 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:07Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:07 crc kubenswrapper[4603]: E0930 19:48:07.473338 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.475515 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.475563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.475581 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.475604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.475622 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.578792 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.578847 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.578859 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.578878 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.578890 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.682263 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.682309 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.682320 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.682343 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.682358 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.785249 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.785298 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.785311 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.785328 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.785343 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.889400 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.889456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.889473 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.889495 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.889513 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.992593 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.992641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.992653 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.992685 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:07 crc kubenswrapper[4603]: I0930 19:48:07.992697 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:07Z","lastTransitionTime":"2025-09-30T19:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.101537 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.101598 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.101617 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.101641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.101666 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.204660 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.204707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.204717 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.204733 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.204766 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.307770 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.307906 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.307928 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.307953 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.308004 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.410905 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.411343 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.411520 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.411735 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.411881 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.515769 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.515819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.515830 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.515849 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.515859 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.619019 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.619058 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.619069 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.619084 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.619095 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.722505 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.722557 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.722574 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.722596 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.722612 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.763316 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.763371 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.763379 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:08 crc kubenswrapper[4603]: E0930 19:48:08.763545 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:08 crc kubenswrapper[4603]: E0930 19:48:08.763634 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.763990 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:08 crc kubenswrapper[4603]: E0930 19:48:08.764134 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:08 crc kubenswrapper[4603]: E0930 19:48:08.764372 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.779900 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.795721 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.811481 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.828628 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.829161 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.829605 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.830560 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.831459 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.832347 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.847843 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.871885 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 
2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.893206 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.910076 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.929639 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.935457 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.935531 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.935555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.935586 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.935607 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:08Z","lastTransitionTime":"2025-09-30T19:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.945289 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff270b4-4b95-47ee-96a1-1916c7a2b17a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fa33729df39dac39eb5ac092f4dcce8214c97279f6d6828ba2ba90051d77ed6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.966456 4603 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:08 crc kubenswrapper[4603]: I0930 19:48:08.987273 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:08Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.003764 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.030877 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad7
7c31b1f0739850bc5dcdd574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:48:01Z\\\",\\\"message\\\":\\\"30 19:48:01.587515 6548 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0930 19:48:01.587427 6548 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-gn9pm\\\\nI0930 19:48:01.587524 6548 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-gn9pm in node crc\\\\nI0930 19:48:01.587529 6548 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-gn9pm after 0 failed attempt(s)\\\\nF0930 19:48:01.587330 6548 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:48:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.039946 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.039994 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.040006 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.040024 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.040039 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:09Z","lastTransitionTime":"2025-09-30T19:48:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.044871 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.058282 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 
19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.083566 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b2d7f80-07cc-4b41-a425-790c143444df\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f782fa9240d87d0243b8bca592fbe6aaa9c6acec0f8bdd1891e8faa02c8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdd7af980975ed9eb11145de47abe761f5706b439d1f657c9f74123a92450434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dda3eb1f012fcddd1cf943a29ae83678c98089cb7ccd10c19eba403281d2b034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b90c8bc4859808ec3f46a5b20b7b49824b49b6d2186b9ddad6bebdf50630d8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3730b24ab199fb09d7e4477cf8dcf4ad2d9bde57fc4282452c245b52a524be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.100999 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.113792 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:09Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.142687 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.142740 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.142749 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.142765 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.142776 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:09Z","lastTransitionTime":"2025-09-30T19:48:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.246579 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.246948 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.247134 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.247411 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:09 crc kubenswrapper[4603]: I0930 19:48:09.247569 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:09Z","lastTransitionTime":"2025-09-30T19:48:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.074108 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.074195 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.074212 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.074240 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.074256 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:10Z","lastTransitionTime":"2025-09-30T19:48:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.764345 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.764415 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.764460 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:10 crc kubenswrapper[4603]: E0930 19:48:10.764543 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:10 crc kubenswrapper[4603]: E0930 19:48:10.764621 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:10 crc kubenswrapper[4603]: I0930 19:48:10.764682 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:10 crc kubenswrapper[4603]: E0930 19:48:10.765006 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:48:10 crc kubenswrapper[4603]: E0930 19:48:10.765343 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:11 crc kubenswrapper[4603]: I0930 19:48:11.001198 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:11 crc kubenswrapper[4603]: I0930 19:48:11.001503 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:11 crc kubenswrapper[4603]: I0930 19:48:11.001678 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:11 crc kubenswrapper[4603]: I0930 19:48:11.001819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:11 crc kubenswrapper[4603]: I0930 19:48:11.001979 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:11Z","lastTransitionTime":"2025-09-30T19:48:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.034326 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.034404 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.034420 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.034446 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.034463 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:12Z","lastTransitionTime":"2025-09-30T19:48:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.763271 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.763363 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.763495 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:12 crc kubenswrapper[4603]: E0930 19:48:12.763630 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:48:12 crc kubenswrapper[4603]: E0930 19:48:12.763769 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:12 crc kubenswrapper[4603]: I0930 19:48:12.763749 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:12 crc kubenswrapper[4603]: E0930 19:48:12.763878 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:12 crc kubenswrapper[4603]: E0930 19:48:12.763490 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:13 crc kubenswrapper[4603]: I0930 19:48:13.063674 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:13 crc kubenswrapper[4603]: I0930 19:48:13.063745 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:13 crc kubenswrapper[4603]: I0930 19:48:13.063766 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:13 crc kubenswrapper[4603]: I0930 19:48:13.063793 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:13 crc kubenswrapper[4603]: I0930 19:48:13.063812 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:13Z","lastTransitionTime":"2025-09-30T19:48:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.097140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.097237 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.097254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.097281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.097296 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:14Z","lastTransitionTime":"2025-09-30T19:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.613622 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.613947 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.614024 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.614104 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.614194 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:14Z","lastTransitionTime":"2025-09-30T19:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.717079 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.717145 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.717208 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.717236 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.717255 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:14Z","lastTransitionTime":"2025-09-30T19:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.763820 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.763901 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:14 crc kubenswrapper[4603]: E0930 19:48:14.763982 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.764053 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:14 crc kubenswrapper[4603]: E0930 19:48:14.764232 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.764262 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:14 crc kubenswrapper[4603]: E0930 19:48:14.764343 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:14 crc kubenswrapper[4603]: E0930 19:48:14.764481 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.820329 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.820399 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.820424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.820453 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.820478 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:14Z","lastTransitionTime":"2025-09-30T19:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.923424 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.923529 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.923550 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.923579 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:14 crc kubenswrapper[4603]: I0930 19:48:14.923601 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:14Z","lastTransitionTime":"2025-09-30T19:48:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.031074 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.031126 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.031142 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.031187 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.031206 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.134471 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.134542 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.134560 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.134582 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.134599 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.238007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.238097 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.238118 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.238145 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.238192 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.341009 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.341284 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.341372 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.341450 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.341511 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.443975 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.444330 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.444348 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.444372 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.444390 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.547307 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.547385 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.547413 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.547446 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.547469 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.650058 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.650119 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.650135 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.650158 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.650212 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.753072 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.753232 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.753281 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.753305 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.753322 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.856092 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.856198 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.856224 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.856255 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.856274 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.959147 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.959282 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.959300 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.959322 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:15 crc kubenswrapper[4603]: I0930 19:48:15.959339 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:15Z","lastTransitionTime":"2025-09-30T19:48:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.061937 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.061978 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.061988 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.062003 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.062016 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.164789 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.164835 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.164851 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.164875 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.164892 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.267635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.267681 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.267691 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.267705 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.267715 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.370109 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.370190 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.370210 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.370239 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.370256 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.472940 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.473000 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.473017 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.473043 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.473061 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.576877 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.576938 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.576956 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.576980 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.576998 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.680488 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.680538 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.680555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.680578 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.680594 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.763500 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:16 crc kubenswrapper[4603]: E0930 19:48:16.763679 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.764008 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:16 crc kubenswrapper[4603]: E0930 19:48:16.764134 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.764445 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:16 crc kubenswrapper[4603]: E0930 19:48:16.764555 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.763520 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:16 crc kubenswrapper[4603]: E0930 19:48:16.764818 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.784850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.784912 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.784929 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.784960 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.784980 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.889470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.889563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.889615 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.889641 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.889660 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.993034 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.993102 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.993119 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.993142 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:16 crc kubenswrapper[4603]: I0930 19:48:16.993191 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:16Z","lastTransitionTime":"2025-09-30T19:48:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.096068 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.096120 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.096137 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.096160 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.096229 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.198804 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.198865 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.198883 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.198908 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.198925 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.301626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.301696 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.301718 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.301746 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.301767 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.405237 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.405305 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.405329 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.405357 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.405378 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.508578 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.508635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.508651 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.508675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.508692 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.598580 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.598648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.598665 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.598692 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.598709 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.620200 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.632362 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.632451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.632475 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.632520 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.632579 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.653285 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.658815 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.658887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.658910 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.658938 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.658959 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.683365 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.687503 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.687538 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.687546 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.687560 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.687571 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.707013 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.711004 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.711056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.711073 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.711096 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.711113 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.729731 4603 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T19:48:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"108450f4-bea7-4e7f-9d53-eb895322e83f\\\",\\\"systemUUID\\\":\\\"f7fb4ae0-02bb-4f95-9dd5-5b62ad1492d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:17Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:17 crc kubenswrapper[4603]: E0930 19:48:17.729953 4603 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.731523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.731572 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.731588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.731611 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.731628 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.834747 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.834800 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.834818 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.834841 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.834861 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.937370 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.937393 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.937401 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.937412 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:17 crc kubenswrapper[4603]: I0930 19:48:17.937420 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:17Z","lastTransitionTime":"2025-09-30T19:48:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.040972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.041027 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.041051 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.041077 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.041098 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.144896 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.144947 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.144958 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.144976 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.144990 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.248332 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.248451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.248476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.248509 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.248534 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.351576 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.351652 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.351676 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.351707 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.351729 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.454319 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.454377 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.454389 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.454404 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.454414 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.557441 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.557561 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.557579 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.557604 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.557624 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.660237 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.660292 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.660308 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.660334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.660350 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.762985 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763058 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763080 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763109 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763131 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763435 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763501 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763436 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.763629 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.763753 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.763745 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.764846 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.765112 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"
Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.765460 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3"
Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.765644 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.783775 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.803977 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0e7528f-0687-49ac-8d6d-8d49e2fb5570\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://986b283903de5c95fd1fe33a237e5ec04c4f9cef32041c464f91f77c6fc98f26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36a5e28f9abfda0b631ad1e41676982649cc792d47d69b0cd5544ceeeeee1bbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c431d391c79fff21ef89026597fb87ac1ed06e5c2893f02e489f7ee9797f382a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b8901b550be6e2cedea5174812faad4926f6e9b601cac88c71e7a98292c5092\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9643621a94c6ed078f67b81527bf53c546f5889919e0ff2280340eac52cb44e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759261619\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759261618\\\\\\\\\\\\\\\" (2025-09-30 18:46:58 +0000 UTC to 2026-09-30 18:46:58 +0000 UTC (now=2025-09-30 19:46:59.123487286 +0000 UTC))\\\\\\\"\\\\nI0930 19:46:59.123527 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0930 19:46:59.123548 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0930 19:46:59.123573 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123596 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0930 19:46:59.123626 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3078338593/tls.crt::/tmp/serving-cert-3078338593/tls.key\\\\\\\"\\\\nI0930 19:46:59.123743 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0930 19:46:59.123862 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123875 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0930 19:46:59.123889 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0930 19:46:59.123895 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0930 19:46:59.123957 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0930 19:46:59.123969 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0930 19:46:59.124732 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://197d489e83af98385ca63e0e7215eaef4ee7e1fe42085aae78b287aa946f6519\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2b0c0259967efdab396af6ae199628df4043553f1d60b1d6bf5b30366c4f517\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.822975 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57499a45-ddb2-4691-8c68-56596018a292\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9eca4d4dd8d8ee77bde1cb6576fa7364a74c688ebf0ec11158663b00f9e03f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1031a9bcfae3467d26770df32b2935e72c9fa1560800d41e154cc9740effde40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4793c7840f85c8240eeee8c2ae34bd5ddd155e51d770043c588d9cd5a7d2139\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffb816b9b1288bbc75ca22e250482e7b1b124dc506e3f6499e4fe73cfbc84dba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.837244 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10f70c9f3c359c4eb3f8338046ae9028792e141eeca56284b6a7d1e29fee70b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 
19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.857817 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.867298 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.867361 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.867396 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.867427 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.867450 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.904515 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"543e574d-42bb-453f-ade2-2e9b5904a3d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:48:01Z\\\",\\\"message\\\":\\\"30 19:48:01.587515 6548 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0930 19:48:01.587427 6548 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-gn9pm\\\\nI0930 19:48:01.587524 6548 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-gn9pm in node crc\\\\nI0930 19:48:01.587529 6548 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-gn9pm after 0 failed attempt(s)\\\\nF0930 19:48:01.587330 6548 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:01Z is after 
2025-08-24T17:21:41Z]\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:48:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nkhtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-blpqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.919609 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-s4x8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc0db5c-2d9a-433f-8c96-48bb418919bc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6e525c6b1bf13eb8fba6850d3ddc4c0cf6aaab431e590341d70beb744a62efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j772b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:02Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-s4x8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.935158 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bbfca2a3-f179-4b9f-b207-be198308366b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9b18b31bb400f0b40509a2baeb2590778ef9751c82df22a54bbf32713c66f40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33cab1fde219796097a262d392ce5c7b8fb463ebe32c36c3bace09eb0ad81c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2m2s4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-r7dxb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 
19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.941830 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.942009 4603 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:48:18 crc kubenswrapper[4603]: E0930 19:48:18.942090 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs podName:895a054c-b0e6-418a-9e96-b941b6e1946d nodeName:}" failed. No retries permitted until 2025-09-30 19:49:22.942063322 +0000 UTC m=+164.880522180 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs") pod "network-metrics-daemon-pwrc5" (UID: "895a054c-b0e6-418a-9e96-b941b6e1946d") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.949633 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ff270b4-4b95-47ee-96a1-1916c7a2b17a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fa33729df39dac39eb5ac092f4dcce8214c97279f6d6828ba2ba90051d77ed6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a4e89d62b1c5ffc4522ad681e1b6d8bf0c712eaee71ea2fb0921fee673ce745\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.969358 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.971806 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.971871 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.971895 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.971925 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.971948 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:18Z","lastTransitionTime":"2025-09-30T19:48:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:18 crc kubenswrapper[4603]: I0930 19:48:18.984377 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"895a054c-b0e6-418a-9e96-b941b6e1946d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27lrl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:14Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pwrc5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:18Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.018625 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b2d7f80-07cc-4b41-a425-790c143444df\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9f782fa9240d87d0243b8bca592fbe6aaa9c6acec0f8bdd1891e8faa02c8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdd7af980975ed9eb11145de47abe761f5706b439d1f657c9f74123a92450434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dda3eb1f012fcddd1cf943a29ae83678c98089cb7ccd10c19eba403281d2b034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b90c8bc4859808ec3f46a5b20b7b49824b49b6
d2186b9ddad6bebdf50630d8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3730b24ab199fb09d7e4477cf8dcf4ad2d9bde57fc4282452c245b52a524be7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d2f82b7956caf124dee3cf8c8843b51d3392c109fb717510c183c22feac9516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9b2862b0648b150ebe683d441298f7ee73900e25ee7a444064c8e7cf97ad9e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77c56c5413067b1220e96df0212a6abdbd0d6ac202987fac681f05b834168006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.039835 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e47f3066-cf5c-4ee9-bea8-2e2e21f14b8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4edbe2ef713c257df9f69bfda2992e6a8fc8d88d205074ab066aa26536a9f9e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a663240030ea272fadba859dc2c363359e2043e24bc29af7e9a39122ae13b60e\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://469795f86fa3d91cbcc35fc98663e21f1962fc459c4f31f5668ef3256f642dde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://14d4db94a229a7a62126b9b4c70d0bd559bc0aec51251c5bb3f53e31dcd7e02a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.054232 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn9pm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30de54b5-7eba-4480-971f-be5bf196d8b3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da60533ca38a022d23ab5fe9a3f8f455588cb59d43d651b71fac54425e4cc561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qw8d8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:46:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn9pm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.075018 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.075074 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.075097 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.075123 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.075143 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.074788 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6sgvc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0d17316-8ee1-4df6-98b6-eefa64f035d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T19:47:47Z\\\",\\\"message\\\":\\\"2025-09-30T19:47:02+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203\\\\n2025-09-30T19:47:02+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_97ca2ff0-817b-4f37-b611-324a88d1d203 to /host/opt/cni/bin/\\\\n2025-09-30T19:47:02Z [verbose] multus-daemon started\\\\n2025-09-30T19:47:02Z [verbose] Readiness Indicator file check\\\\n2025-09-30T19:47:47Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8nsnc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6sgvc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.091921 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3adf7280-9c4a-403e-8605-b5e5897f3521\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca82d5eb13c37b2ed8af62d20b121d96975cb4eff1a60d56f32ea89f0ca7a966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d852n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8q5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.114479 4603 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xwttq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b53f70a-60cb-4928-95ef-836c0e4170a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T19:47:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3d1a881652b8daa4ad942064e8e17daee0c7b948e6dbeb59659533e9d525d82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:47:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1195be82b80e28576d038cb99b12f1df430a1a37fc7604148263428a689f912a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bf57d17bfa614343d5e66e6142108f9cd41f506b31c28852cdffe8be9f071b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de62ae7e0ab3c775732dae4c02710fb3445764864bf679b12eb303922da5a8d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d50cd054181e453a2abdc8a90f486817962bc8a05e8d4e3c2f7b26782f88916\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://814da9938b63ac9ef80ae8c9b03097e519eeb2c5809c4710407303908a0c3ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://936b33a7a8e8c0474cc5c68a66028a18aa85428be2678dcd075df3dcf470af2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T19:47:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T19:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T19:47:00Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xwttq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.137430 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58bf5ab6198b49f8febd1738548a3e8d8f3f9931e7e29b70f876162761bf68ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.157717 4603 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T19:46:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f48fc99903a198fea767b0fdc6fe4fc9b6be42b90f7f95936004f5aaf603a4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a827c15c48be7479960da26a62cffe44031d4665b6f050c4b06144420680bec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T19:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T19:48:19Z is after 2025-08-24T17:21:41Z" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.178610 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.178657 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.178675 4603 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.178698 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.178716 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.281757 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.281808 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.281823 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.281845 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.281859 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.384026 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.384069 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.384081 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.384099 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.384114 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.487068 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.487129 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.487153 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.487220 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.487245 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.590351 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.590406 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.590418 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.590437 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.590449 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.692799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.692840 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.692853 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.692872 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.692885 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.796392 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.796450 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.796472 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.796499 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.796572 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.899630 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.899699 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.899713 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.899730 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:19 crc kubenswrapper[4603]: I0930 19:48:19.899742 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:19Z","lastTransitionTime":"2025-09-30T19:48:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.002972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.003022 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.003037 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.003056 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.003070 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.106254 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.106293 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.106304 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.106321 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.106333 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.209235 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.209284 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.209296 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.209313 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.209327 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.312504 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.312550 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.312561 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.312579 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.312590 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.414819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.414919 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.414939 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.414963 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.414980 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.518155 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.518242 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.518266 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.518293 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.518315 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.621849 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.621922 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.621946 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.621975 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.621997 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.725120 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.725194 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.725208 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.725227 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.725238 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.763917 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.763985 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.764023 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:20 crc kubenswrapper[4603]: E0930 19:48:20.764152 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.764326 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:20 crc kubenswrapper[4603]: E0930 19:48:20.764418 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:20 crc kubenswrapper[4603]: E0930 19:48:20.764578 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:20 crc kubenswrapper[4603]: E0930 19:48:20.764742 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.828482 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.828520 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.828530 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.828547 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.828560 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.931017 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.931057 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.931068 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.931084 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:20 crc kubenswrapper[4603]: I0930 19:48:20.931094 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:20Z","lastTransitionTime":"2025-09-30T19:48:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.034072 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.034117 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.034133 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.034155 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.034215 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.136946 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.136985 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.136993 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.137007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.137016 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.241497 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.241799 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.241870 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.241949 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.242059 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.345421 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.345469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.345479 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.345497 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.345508 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.448918 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.448972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.448985 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.449007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.449018 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.552035 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.552108 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.552125 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.552150 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.552201 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.657135 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.657290 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.657307 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.657335 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.657352 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.760723 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.760787 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.760804 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.760830 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.760850 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.864632 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.864716 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.864733 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.864765 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.864781 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.968469 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.968543 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.968567 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.968601 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:21 crc kubenswrapper[4603]: I0930 19:48:21.968622 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:21Z","lastTransitionTime":"2025-09-30T19:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.072086 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.072140 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.072192 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.072225 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.072243 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.175580 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.175614 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.175622 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.175635 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.175644 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.278760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.279247 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.279443 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.279605 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.279761 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.382760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.382805 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.382816 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.382829 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.382842 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.485419 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.485466 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.485482 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.485500 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.485512 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.588609 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.588648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.588661 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.588676 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.588686 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.691668 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.691749 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.691773 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.691800 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.691822 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.763643 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.763709 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.763739 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.763709 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:22 crc kubenswrapper[4603]: E0930 19:48:22.763914 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:22 crc kubenswrapper[4603]: E0930 19:48:22.764158 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:22 crc kubenswrapper[4603]: E0930 19:48:22.764332 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:22 crc kubenswrapper[4603]: E0930 19:48:22.764473 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.795436 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.795532 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.795550 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.795611 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.795630 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.898346 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.898385 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.898403 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.898425 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:22 crc kubenswrapper[4603]: I0930 19:48:22.898441 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:22Z","lastTransitionTime":"2025-09-30T19:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.001414 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.001456 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.001466 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.001481 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.001492 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.103543 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.103588 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.103602 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.103618 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.103629 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.205678 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.205715 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.205726 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.205741 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.205752 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.308750 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.308792 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.308802 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.308817 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.308827 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.411582 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.411644 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.411667 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.411694 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.411715 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.514257 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.514302 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.514317 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.514334 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.514347 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.617256 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.617329 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.617352 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.617380 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.617398 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.720416 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.720476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.720494 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.720519 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.720537 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.823781 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.823822 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.823833 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.823850 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.823861 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.926765 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.927257 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.927637 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.927942 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:23 crc kubenswrapper[4603]: I0930 19:48:23.928268 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:23Z","lastTransitionTime":"2025-09-30T19:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.031612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.032097 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.032508 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.032677 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.032803 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.136336 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.136491 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.136516 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.136539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.136556 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.239230 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.239304 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.239322 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.239343 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.239359 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.341988 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.342055 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.342071 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.342095 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.342112 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.444441 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.444505 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.444523 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.444548 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.444564 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.547612 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.547659 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.547669 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.547685 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.547697 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.650669 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.650696 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.650704 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.650716 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.650724 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.753195 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.753250 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.753267 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.753305 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.753322 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.764885 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:24 crc kubenswrapper[4603]: E0930 19:48:24.765036 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.765297 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.765307 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.765347 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:24 crc kubenswrapper[4603]: E0930 19:48:24.765704 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:24 crc kubenswrapper[4603]: E0930 19:48:24.765488 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:24 crc kubenswrapper[4603]: E0930 19:48:24.765767 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.856404 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.856460 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.856476 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.856500 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.856517 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.959762 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.959817 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.959828 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.959846 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:24 crc kubenswrapper[4603]: I0930 19:48:24.959863 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:24Z","lastTransitionTime":"2025-09-30T19:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.062839 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.062901 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.062910 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.062923 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.062931 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.166363 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.166420 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.166442 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.166470 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.166494 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.270267 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.270332 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.270353 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.270376 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.270396 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.373957 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.373996 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.374007 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.374025 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.374042 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.476563 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.476608 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.476626 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.476648 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.476665 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.579689 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.579743 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.579760 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.579784 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.579801 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.682483 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.682529 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.682539 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.682554 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.682565 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.785363 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.785748 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.785966 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.786155 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.786360 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.889219 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.889271 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.889289 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.889310 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.889326 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.992779 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.992887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.992905 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.992930 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:25 crc kubenswrapper[4603]: I0930 19:48:25.992947 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:25Z","lastTransitionTime":"2025-09-30T19:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.096394 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.096433 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.096446 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.096462 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.096473 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.199406 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.199451 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.199467 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.199488 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.199506 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.301952 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.301986 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.301995 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.302010 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.302019 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.403816 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.404100 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.404472 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.404657 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.404812 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.507511 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.507778 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.507870 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.507992 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.508156 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.610441 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.610972 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.611074 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.611209 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.611286 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.713793 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.713879 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.713898 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.713920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.713937 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.763973 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5"
Sep 30 19:48:26 crc kubenswrapper[4603]: E0930 19:48:26.764273 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.763988 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:48:26 crc kubenswrapper[4603]: E0930 19:48:26.764472 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.763973 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:48:26 crc kubenswrapper[4603]: E0930 19:48:26.764617 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.764125 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:48:26 crc kubenswrapper[4603]: E0930 19:48:26.764829 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.815887 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.815913 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.815922 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.815934 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.815945 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.918439 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.918753 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.918764 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.918776 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:26 crc kubenswrapper[4603]: I0930 19:48:26.918785 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:26Z","lastTransitionTime":"2025-09-30T19:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.021810 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.022268 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.022487 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.022668 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.022820 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.126414 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.126675 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.126920 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.127152 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.127450 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.231118 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.231223 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.231248 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.231275 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.231295 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.334801 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.334856 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.334879 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.334907 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.334926 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.439060 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.439409 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.439541 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.439716 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.439842 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.543562 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.543631 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.543649 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.543673 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.543695 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.646974 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.647041 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.647059 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.647084 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.647103 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.749828 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.750298 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.750555 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.750819 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.751018 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.854664 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.854706 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.854716 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.854730 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.854741 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.883317 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.883355 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.883367 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.883382 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.883392 4603 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T19:48:27Z","lastTransitionTime":"2025-09-30T19:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.951561 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"]
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.952094 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.954637 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.954840 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.955012 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Sep 30 19:48:27 crc kubenswrapper[4603]: I0930 19:48:27.956353 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.011400 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-gn9pm" podStartSLOduration=90.011374532 podStartE2EDuration="1m30.011374532s" podCreationTimestamp="2025-09-30 19:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.010889647 +0000 UTC m=+109.949348465" watchObservedRunningTime="2025-09-30 19:48:28.011374532 +0000 UTC m=+109.949833350"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.041659 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.041739 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.041808 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.041867 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.042010 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.043402 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-6sgvc" podStartSLOduration=89.043385585 podStartE2EDuration="1m29.043385585s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.026678723 +0000 UTC m=+109.965137541" watchObservedRunningTime="2025-09-30 19:48:28.043385585 +0000 UTC m=+109.981844413"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.043702 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podStartSLOduration=89.043690814 podStartE2EDuration="1m29.043690814s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.04321732 +0000 UTC m=+109.981676138" watchObservedRunningTime="2025-09-30 19:48:28.043690814 +0000 UTC m=+109.982149672"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.072144 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-xwttq" podStartSLOduration=89.072105265 podStartE2EDuration="1m29.072105265s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.071587771 +0000 UTC m=+110.010046609" watchObservedRunningTime="2025-09-30 19:48:28.072105265 +0000 UTC m=+110.010564093"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.127559 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.12752476 podStartE2EDuration="1m29.12752476s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.109698307 +0000 UTC m=+110.048157125" watchObservedRunningTime="2025-09-30 19:48:28.12752476 +0000 UTC m=+110.065983578"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.142624 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=53.142602135 podStartE2EDuration="53.142602135s" podCreationTimestamp="2025-09-30 19:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.127444928 +0000 UTC m=+110.065903746" watchObservedRunningTime="2025-09-30 19:48:28.142602135 +0000 UTC m=+110.081060953"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.143272 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.143338 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b"
"operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.143359 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.143384 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.143405 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.144502 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.144559 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.145084 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.156978 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.157530 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
podStartSLOduration=26.157512817 podStartE2EDuration="26.157512817s" podCreationTimestamp="2025-09-30 19:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.156635661 +0000 UTC m=+110.095094479" watchObservedRunningTime="2025-09-30 19:48:28.157512817 +0000 UTC m=+110.095971635" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.167927 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d508d2c4-e6b1-48f5-a368-b6edb0fa94b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4sw9b\" (UID: \"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.239378 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-s4x8t" podStartSLOduration=89.239362567 podStartE2EDuration="1m29.239362567s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.238887584 +0000 UTC m=+110.177346432" watchObservedRunningTime="2025-09-30 19:48:28.239362567 +0000 UTC m=+110.177821385" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.250814 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-r7dxb" podStartSLOduration=88.250795609 podStartE2EDuration="1m28.250795609s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.250655815 +0000 UTC m=+110.189114643" watchObservedRunningTime="2025-09-30 19:48:28.250795609 +0000 UTC m=+110.189254437" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.271009 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.276224 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=22.276207767 podStartE2EDuration="22.276207767s" podCreationTimestamp="2025-09-30 19:48:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.274789287 +0000 UTC m=+110.213248115" watchObservedRunningTime="2025-09-30 19:48:28.276207767 +0000 UTC m=+110.214666595" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.293110 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.293091563 podStartE2EDuration="1m28.293091563s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:28.291115157 +0000 UTC m=+110.229573985" watchObservedRunningTime="2025-09-30 19:48:28.293091563 +0000 UTC m=+110.231550381" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.387113 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" event={"ID":"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7","Type":"ContainerStarted","Data":"f69f2addd15dab9e9e2db4c488a73cf4067ffff32a98b8326dd54df2099fe4b6"} Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.763592 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.763639 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.763666 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:28 crc kubenswrapper[4603]: E0930 19:48:28.765876 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:28 crc kubenswrapper[4603]: I0930 19:48:28.765911 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:28 crc kubenswrapper[4603]: E0930 19:48:28.766043 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:28 crc kubenswrapper[4603]: E0930 19:48:28.766196 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:28 crc kubenswrapper[4603]: E0930 19:48:28.766318 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:29 crc kubenswrapper[4603]: I0930 19:48:29.392665 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" event={"ID":"d508d2c4-e6b1-48f5-a368-b6edb0fa94b7","Type":"ContainerStarted","Data":"d4fa6b3c4d8d02ec2c2c08998b453ff147841eba6d8f02d8d47337755bed449b"} Sep 30 19:48:30 crc kubenswrapper[4603]: I0930 19:48:30.763698 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:30 crc kubenswrapper[4603]: I0930 19:48:30.763793 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:30 crc kubenswrapper[4603]: E0930 19:48:30.763879 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:30 crc kubenswrapper[4603]: E0930 19:48:30.763996 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:30 crc kubenswrapper[4603]: I0930 19:48:30.764112 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:30 crc kubenswrapper[4603]: E0930 19:48:30.764345 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:30 crc kubenswrapper[4603]: I0930 19:48:30.764556 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:30 crc kubenswrapper[4603]: E0930 19:48:30.764705 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:31 crc kubenswrapper[4603]: I0930 19:48:31.764653 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:48:31 crc kubenswrapper[4603]: E0930 19:48:31.765046 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-blpqj_openshift-ovn-kubernetes(543e574d-42bb-453f-ade2-2e9b5904a3d3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" Sep 30 19:48:32 crc kubenswrapper[4603]: I0930 19:48:32.763972 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:32 crc kubenswrapper[4603]: I0930 19:48:32.764036 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:32 crc kubenswrapper[4603]: I0930 19:48:32.764036 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:32 crc kubenswrapper[4603]: I0930 19:48:32.764055 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:32 crc kubenswrapper[4603]: E0930 19:48:32.764228 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:32 crc kubenswrapper[4603]: E0930 19:48:32.764354 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:32 crc kubenswrapper[4603]: E0930 19:48:32.764515 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:32 crc kubenswrapper[4603]: E0930 19:48:32.764656 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.409680 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/1.log" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.410390 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/0.log" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.410427 4603 generic.go:334] "Generic (PLEG): container finished" podID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" containerID="6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8" exitCode=1 Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.410456 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerDied","Data":"6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8"} Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.410486 4603 scope.go:117] "RemoveContainer" containerID="bb53adea05f6a19bb805f3d9d0700b27ebaffffbadc24b849d3732a496d9d12b" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.410810 4603 scope.go:117] "RemoveContainer" containerID="6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8" Sep 30 19:48:34 crc kubenswrapper[4603]: E0930 19:48:34.410947 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-6sgvc_openshift-multus(d0d17316-8ee1-4df6-98b6-eefa64f035d9)\"" pod="openshift-multus/multus-6sgvc" podUID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.436274 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4sw9b" podStartSLOduration=95.43625805 podStartE2EDuration="1m35.43625805s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:29.412991474 +0000 UTC m=+111.351450322" watchObservedRunningTime="2025-09-30 19:48:34.43625805 +0000 UTC m=+116.374716868" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.764085 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:34 crc kubenswrapper[4603]: E0930 19:48:34.764463 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.764231 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:34 crc kubenswrapper[4603]: E0930 19:48:34.764651 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.764184 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:34 crc kubenswrapper[4603]: E0930 19:48:34.764841 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:34 crc kubenswrapper[4603]: I0930 19:48:34.764291 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:34 crc kubenswrapper[4603]: E0930 19:48:34.765022 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:35 crc kubenswrapper[4603]: I0930 19:48:35.416766 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/1.log" Sep 30 19:48:36 crc kubenswrapper[4603]: I0930 19:48:36.764062 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:36 crc kubenswrapper[4603]: I0930 19:48:36.764137 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:36 crc kubenswrapper[4603]: I0930 19:48:36.764262 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:36 crc kubenswrapper[4603]: E0930 19:48:36.764269 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:36 crc kubenswrapper[4603]: E0930 19:48:36.764391 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:36 crc kubenswrapper[4603]: E0930 19:48:36.764525 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:36 crc kubenswrapper[4603]: I0930 19:48:36.765361 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:36 crc kubenswrapper[4603]: E0930 19:48:36.765592 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:38 crc kubenswrapper[4603]: I0930 19:48:38.763707 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:38 crc kubenswrapper[4603]: I0930 19:48:38.763818 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:38 crc kubenswrapper[4603]: I0930 19:48:38.765011 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.766035 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:38 crc kubenswrapper[4603]: I0930 19:48:38.766100 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.766117 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.766208 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.766308 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.766852 4603 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 19:48:38 crc kubenswrapper[4603]: E0930 19:48:38.874686 4603 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:48:40 crc kubenswrapper[4603]: I0930 19:48:40.763813 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:40 crc kubenswrapper[4603]: E0930 19:48:40.764021 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:40 crc kubenswrapper[4603]: I0930 19:48:40.764473 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:40 crc kubenswrapper[4603]: I0930 19:48:40.764920 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:40 crc kubenswrapper[4603]: E0930 19:48:40.765023 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:40 crc kubenswrapper[4603]: E0930 19:48:40.765365 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:40 crc kubenswrapper[4603]: I0930 19:48:40.765423 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:40 crc kubenswrapper[4603]: E0930 19:48:40.765769 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:42 crc kubenswrapper[4603]: I0930 19:48:42.763681 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:42 crc kubenswrapper[4603]: I0930 19:48:42.763732 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:42 crc kubenswrapper[4603]: I0930 19:48:42.763733 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:42 crc kubenswrapper[4603]: I0930 19:48:42.763792 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:42 crc kubenswrapper[4603]: E0930 19:48:42.763884 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:42 crc kubenswrapper[4603]: E0930 19:48:42.764009 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:42 crc kubenswrapper[4603]: E0930 19:48:42.764088 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:42 crc kubenswrapper[4603]: E0930 19:48:42.764234 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:43 crc kubenswrapper[4603]: I0930 19:48:43.765679 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:48:43 crc kubenswrapper[4603]: E0930 19:48:43.876653 4603 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.447703 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/3.log" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.450230 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerStarted","Data":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.450658 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.750410 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podStartSLOduration=105.750376803 podStartE2EDuration="1m45.750376803s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:48:44.476566854 +0000 UTC m=+126.415025692" watchObservedRunningTime="2025-09-30 19:48:44.750376803 +0000 UTC m=+126.688835661" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.752715 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pwrc5"] Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.752887 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:44 crc kubenswrapper[4603]: E0930 19:48:44.753063 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.763630 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.763704 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.763753 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:44 crc kubenswrapper[4603]: E0930 19:48:44.763988 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:44 crc kubenswrapper[4603]: I0930 19:48:44.764077 4603 scope.go:117] "RemoveContainer" containerID="6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8" Sep 30 19:48:44 crc kubenswrapper[4603]: E0930 19:48:44.764105 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:44 crc kubenswrapper[4603]: E0930 19:48:44.764210 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:45 crc kubenswrapper[4603]: I0930 19:48:45.454959 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/1.log" Sep 30 19:48:45 crc kubenswrapper[4603]: I0930 19:48:45.455357 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerStarted","Data":"637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b"} Sep 30 19:48:46 crc kubenswrapper[4603]: I0930 19:48:46.763810 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:46 crc kubenswrapper[4603]: I0930 19:48:46.763897 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:46 crc kubenswrapper[4603]: I0930 19:48:46.763923 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:46 crc kubenswrapper[4603]: I0930 19:48:46.763954 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:46 crc kubenswrapper[4603]: E0930 19:48:46.763991 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:46 crc kubenswrapper[4603]: E0930 19:48:46.764133 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:46 crc kubenswrapper[4603]: E0930 19:48:46.764345 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:46 crc kubenswrapper[4603]: E0930 19:48:46.764681 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:48 crc kubenswrapper[4603]: I0930 19:48:48.764005 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:48 crc kubenswrapper[4603]: I0930 19:48:48.764078 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:48 crc kubenswrapper[4603]: I0930 19:48:48.764101 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:48 crc kubenswrapper[4603]: E0930 19:48:48.766476 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 19:48:48 crc kubenswrapper[4603]: I0930 19:48:48.766513 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:48 crc kubenswrapper[4603]: E0930 19:48:48.766661 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 19:48:48 crc kubenswrapper[4603]: E0930 19:48:48.766814 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pwrc5" podUID="895a054c-b0e6-418a-9e96-b941b6e1946d" Sep 30 19:48:48 crc kubenswrapper[4603]: E0930 19:48:48.766940 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.763962 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.764009 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.764094 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.764421 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.766986 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.767514 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.767839 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.768081 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.768233 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 30 19:48:50 crc kubenswrapper[4603]: I0930 19:48:50.768500 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.696861 4603 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.770652 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8jxkn"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.771427 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.771632 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.771810 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.773405 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-g965c"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.774153 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.788503 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.789009 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.789536 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.789573 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.789796 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.791603 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.792155 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.794430 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.794691 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.794991 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.795409 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.797770 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv7tl"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.798464 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.799769 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.799814 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: W0930 19:48:58.800135 4603 reflector.go:561] object-"openshift-route-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:48:58 crc kubenswrapper[4603]: E0930 19:48:58.800189 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.800139 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.800587 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.800700 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.800793 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: W0930 19:48:58.800902 4603 reflector.go:561] object-"openshift-route-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:48:58 crc kubenswrapper[4603]: E0930 19:48:58.800928 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.800978 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.801121 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.801297 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.801339 4603 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.801403 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.801626 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.802094 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.805774 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.806180 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.807295 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.809364 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.809547 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.809778 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.809920 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.810032 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.810458 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.822346 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.823286 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.843957 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.844240 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.844323 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.844176 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.850956 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.851499 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.852735 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.853043 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.853384 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.853616 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.850883 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.855479 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.855611 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.855743 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.855925 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856097 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856238 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856396 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856552 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856679 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856829 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.856955 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.857037 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.860019 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.862621 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.863424 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.863836 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.864852 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.865232 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.866902 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2d2ld"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.867459 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gvrsj"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.867652 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.868033 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fgjdh"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.868228 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.868990 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.870357 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.870794 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"] Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.871497 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872409 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.871938 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874717 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.871964 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874781 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872111 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872140 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872189 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872218 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872239 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872264 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872615 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875321 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872711 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875413 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872744 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872767 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872795 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872915 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 30 19:48:58 crc 
kubenswrapper[4603]: I0930 19:48:58.875609 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.872938 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.873601 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.873604 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.873699 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875762 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874156 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874298 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875857 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874298 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874342 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874465 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875942 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.874310 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876237 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876454 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876642 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-hswfh"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876683 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.875322 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876747 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.877131 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.876781 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.877244 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.877826 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.877924 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hswfh"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.877962 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.878037 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.878110 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.878704 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.879212 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.890376 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.890963 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891061 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891180 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891294 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjc6m\" (UniqueName: \"kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891373 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891457 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891540 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891625 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2svcv\" (UniqueName: \"kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891706 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891795 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-425j2\" (UniqueName: \"kubernetes.io/projected/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-kube-api-access-425j2\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.891881 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.892071 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-serving-cert\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.901851 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.905827 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.913318 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.914861 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.914931 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.915282 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.915614 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.915853 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.916137 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.917371 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.917508 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.917927 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.918032 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.918143 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.918278 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.921886 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.922016 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.922177 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.926908 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.927901 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.929415 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cb8bv"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.929557 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.929731 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.929810 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p72j4"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.930022 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cb8bv"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.931904 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.932217 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.932432 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.932666 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.932499 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.933684 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.934070 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.934270 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pq477"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.934987 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.936345 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.944258 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.944989 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.945388 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.945520 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.947303 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.947933 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.950583 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.951871 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.952935 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.956450 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.959689 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-cffbx"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.960576 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-cffbx"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.960952 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.961526 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.980078 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.983146 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-g965c"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993074 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjc6m\" (UniqueName: \"kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993117 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/871dd358-5fc9-4438-857a-193463cc9a9a-metrics-tls\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993136 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993155 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993203 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993219 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-node-pullsecrets\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993237 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993252 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-serving-cert\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993270 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993285 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993300 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-client\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993315 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-serving-cert\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993331 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993349 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993363 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2729\" (UniqueName: \"kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993378 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-image-import-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993394 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993412 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-auth-proxy-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993436 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhqtf\" (UniqueName: \"kubernetes.io/projected/c78675e4-742c-4851-b9b9-97c086a35138-kube-api-access-dhqtf\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993457 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993472 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7srp8\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-kube-api-access-7srp8\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993485 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993502 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993516 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptc25\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993531 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/871dd358-5fc9-4438-857a-193463cc9a9a-trusted-ca\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993546 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993561 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993569 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993589 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chdg8\" (UniqueName: \"kubernetes.io/projected/007744a9-1794-4ff2-ba1d-f7c6d794f987-kube-api-access-chdg8\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993610 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993625 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993639 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-encryption-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993678 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jphx\" (UniqueName: \"kubernetes.io/projected/79714c45-e39c-431c-b45a-eb244926ced5-kube-api-access-9jphx\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993696 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2svcv\" (UniqueName: \"kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993715 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993729 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit-dir\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993745 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4mcs\" (UniqueName: \"kubernetes.io/projected/2aa0c946-2015-4a51-b4a6-be84957d1ffa-kube-api-access-z4mcs\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993760 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993776 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993793 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993809 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993832 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-425j2\" (UniqueName: \"kubernetes.io/projected/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-kube-api-access-425j2\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993847 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993866 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993881 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-serving-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993898 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993913 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993931 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-serving-cert\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993947 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-service-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993962 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-dir\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993978 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-client\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.993992 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-config\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994007 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994029 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-encryption-config\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994043 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994059 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994073 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-policies\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994088 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994103 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2aa0c946-2015-4a51-b4a6-be84957d1ffa-serving-cert\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994119 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994135 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994155 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994229 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994244 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79714c45-e39c-431c-b45a-eb244926ced5-metrics-tls\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994259 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994274 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994291 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c78675e4-742c-4851-b9b9-97c086a35138-machine-approver-tls\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994305 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dv8s\" (UniqueName: \"kubernetes.io/projected/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-kube-api-access-8dv8s\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.994319 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:58 crc kubenswrapper[4603]: E0930 19:48:58.994648 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.494637612 +0000 UTC m=+141.433096430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.995370 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.995678 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8jxkn"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.995688 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"]
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.997924 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.998644 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:58 crc kubenswrapper[4603]: I0930 19:48:58.998882 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:58.999873 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.002629 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-8s2lm"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.019399 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-available-featuregates\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.021785 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.022276 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.024102 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.024595 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-serving-cert\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.031929 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.031970 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2d2ld"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.031982 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.031991 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032000 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032008 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032016 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032029 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032037 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032046 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.032123 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8s2lm"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.034518 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.035746 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.038055 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.038108 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv7tl"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.041014 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.041119 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fgjdh"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.042862 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.043787 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.044851 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gvrsj"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.046874 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5lmct"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.047852 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.047965 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5lmct"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.048206 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cb8bv"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.056721 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.057396 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8s2lm"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.060840 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.061574 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.062614 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5lmct"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.063551 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.064666 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.065608 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.067788 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.069836 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.070676 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.072075 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p72j4"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.073491 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.073844 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.074440 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-cffbx"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.075681 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-89sx8"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.076539 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-z2tgs"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.076677 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-89sx8"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.078313 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-89sx8"]
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.078792 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-z2tgs"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.093612 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094624 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094774 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-encryption-config\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094824 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px95m\" (UniqueName: \"kubernetes.io/projected/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-kube-api-access-px95m\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094849 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a64ba70-8f84-4334-8e7f-df515a31a3c5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094872 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-images\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"
Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094897 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.094931 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed.
No retries permitted until 2025-09-30 19:48:59.594910996 +0000 UTC m=+141.533369814 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.094964 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-policies\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095005 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095025 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2aa0c946-2015-4a51-b4a6-be84957d1ffa-serving-cert\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095048 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-images\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095079 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095102 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79714c45-e39c-431c-b45a-eb244926ced5-metrics-tls\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095135 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095182 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095198 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095213 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c78675e4-742c-4851-b9b9-97c086a35138-machine-approver-tls\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095231 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dv8s\" (UniqueName: \"kubernetes.io/projected/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-kube-api-access-8dv8s\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095246 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095263 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-config\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095306 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/871dd358-5fc9-4438-857a-193463cc9a9a-metrics-tls\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095324 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095344 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca\") pod 
\"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095359 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-node-pullsecrets\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095376 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnl9l\" (UniqueName: \"kubernetes.io/projected/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-kube-api-access-rnl9l\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095396 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095411 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-serving-cert\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095428 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-config\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095443 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-client\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095459 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-serving-cert\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095483 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095498 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2729\" (UniqueName: \"kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095514 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a64ba70-8f84-4334-8e7f-df515a31a3c5-config\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095545 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095576 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-image-import-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095591 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095603 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095606 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-auth-proxy-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095652 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhqtf\" (UniqueName: \"kubernetes.io/projected/c78675e4-742c-4851-b9b9-97c086a35138-kube-api-access-dhqtf\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095673 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-gj4pn\" (UniqueName: \"kubernetes.io/projected/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-kube-api-access-gj4pn\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095700 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095718 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7srp8\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-kube-api-access-7srp8\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095735 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095751 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095767 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095782 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptc25\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095800 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/871dd358-5fc9-4438-857a-193463cc9a9a-trusted-ca\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095815 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: 
\"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095834 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chdg8\" (UniqueName: \"kubernetes.io/projected/007744a9-1794-4ff2-ba1d-f7c6d794f987-kube-api-access-chdg8\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095850 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-serving-cert\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095868 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jphx\" (UniqueName: \"kubernetes.io/projected/79714c45-e39c-431c-b45a-eb244926ced5-kube-api-access-9jphx\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095891 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095908 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095924 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-encryption-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095950 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095969 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit-dir\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.095989 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-z4mcs\" (UniqueName: \"kubernetes.io/projected/2aa0c946-2015-4a51-b4a6-be84957d1ffa-kube-api-access-z4mcs\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096011 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096034 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096056 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096085 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a64ba70-8f84-4334-8e7f-df515a31a3c5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096104 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-proxy-tls\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096131 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096150 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-trusted-ca\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096193 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-serving-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096209 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-auth-proxy-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096216 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096223 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096254 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096279 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-service-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096298 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096320 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096341 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-dir\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096361 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-client\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096384 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-config\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.096405 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.097006 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-policies\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.097337 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.097390 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-node-pullsecrets\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.097508 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.101003 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.101399 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.103249 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.104434 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.104522 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-image-import-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.104889 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78675e4-742c-4851-b9b9-97c086a35138-config\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.105285 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.105675 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.106051 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.60603559 +0000 UTC m=+141.544494408 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.108628 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.109076 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-service-ca-bundle\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.109187 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/007744a9-1794-4ff2-ba1d-f7c6d794f987-audit-dir\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.109990 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.110542 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.110547 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/007744a9-1794-4ff2-ba1d-f7c6d794f987-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.110965 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-serving-ca\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.111006 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/871dd358-5fc9-4438-857a-193463cc9a9a-trusted-ca\") pod 
\"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.112155 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.112497 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-serving-cert\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.112693 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-encryption-config\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.113121 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-etcd-client\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.113429 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2aa0c946-2015-4a51-b4a6-be84957d1ffa-serving-cert\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.113492 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.113996 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2aa0c946-2015-4a51-b4a6-be84957d1ffa-config\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.114180 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-audit-dir\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.114187 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-etcd-client\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.114521 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/007744a9-1794-4ff2-ba1d-f7c6d794f987-serving-cert\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.114957 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.115144 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.115450 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.115489 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.119175 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120439 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/79714c45-e39c-431c-b45a-eb244926ced5-metrics-tls\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120592 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120647 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120690 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120870 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c78675e4-742c-4851-b9b9-97c086a35138-machine-approver-tls\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.120881 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.121312 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/871dd358-5fc9-4438-857a-193463cc9a9a-metrics-tls\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.127459 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-encryption-config\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.133808 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.156187 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.173689 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.193449 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197271 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: 
I0930 19:48:59.197381 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnl9l\" (UniqueName: \"kubernetes.io/projected/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-kube-api-access-rnl9l\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197410 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-config\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197438 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a64ba70-8f84-4334-8e7f-df515a31a3c5-config\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197476 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj4pn\" (UniqueName: \"kubernetes.io/projected/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-kube-api-access-gj4pn\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197501 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-serving-cert\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197544 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a64ba70-8f84-4334-8e7f-df515a31a3c5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197558 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-proxy-tls\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197585 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-trusted-ca\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197605 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197624 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197646 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a64ba70-8f84-4334-8e7f-df515a31a3c5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197662 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-images\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.197684 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.69766893 +0000 UTC m=+141.636127808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197710 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px95m\" (UniqueName: \"kubernetes.io/projected/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-kube-api-access-px95m\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197742 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-images\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.197780 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-config\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.198496 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-config\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.198910 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.199126 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-images\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.201000 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.214252 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 19:48:59 crc kubenswrapper[4603]: 
I0930 19:48:59.233523 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.253548 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.273991 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.293510 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.298976 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.299418 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.799404335 +0000 UTC m=+141.737863153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.313120 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.353483 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.373377 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.393806 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.399609 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.399866 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.899823733 +0000 UTC m=+141.838282591 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.400334 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.400878 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:48:59.900860652 +0000 UTC m=+141.839319510 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.414554 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.433833 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.455487 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.473816 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.493250 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.501368 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.501630 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.001574588 +0000 UTC m=+141.940033466 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.502035 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.502911 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.002817614 +0000 UTC m=+141.941276472 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.514371 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.534100 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.554037 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.574254 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.594940 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.602871 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.603099 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.103063327 +0000 UTC m=+142.041522175 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.603540 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.604082 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.104067245 +0000 UTC m=+142.042526103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.613623 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.633828 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.653313 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.674732 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.682307 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a64ba70-8f84-4334-8e7f-df515a31a3c5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.695535 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.704619 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.704824 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.204799742 +0000 UTC m=+142.143258560 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.705159 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.705502 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.205494341 +0000 UTC m=+142.143953159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.713453 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.733530 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.738749 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a64ba70-8f84-4334-8e7f-df515a31a3c5-config\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.754425 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.761947 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-proxy-tls\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: 
\"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.774213 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.793897 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.805817 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.806102 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.306070473 +0000 UTC m=+142.244529321 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.806370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.806698 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.306686481 +0000 UTC m=+142.245145299 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.814446 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.834232 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.839575 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-images\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.854233 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.874488 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.894618 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.906762 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.906954 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.406919003 +0000 UTC m=+142.345377851 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.907093 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.907452 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.407440578 +0000 UTC m=+142.345899396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.914706 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.932379 4603 request.go:700] Waited for 1.002047747s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console-operator/configmaps?fieldSelector=metadata.name%3Dconsole-operator-config&limit=500&resourceVersion=0 Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.933816 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.940280 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-config\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.954472 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.974692 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 30 19:48:59 crc kubenswrapper[4603]: I0930 19:48:59.983539 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-serving-cert\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") 
" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.995984 4603 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:48:59 crc kubenswrapper[4603]: E0930 19:48:59.996059 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca podName:c7503d65-f97f-45a5-94ec-9f210ea705c9 nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.496041702 +0000 UTC m=+142.434500530 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca") pod "route-controller-manager-6576b87f9c-6lk6w" (UID: "c7503d65-f97f-45a5-94ec-9f210ea705c9") : failed to sync configmap cache: timed out waiting for the condition Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.004295 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.007803 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.008373 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.508229686 +0000 UTC m=+142.446688544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.009040 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.009604 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.509577305 +0000 UTC m=+142.448036163 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.010789 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-trusted-ca\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.015326 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.034304 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.054306 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.075116 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.094671 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.111055 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.111524 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.611488974 +0000 UTC m=+142.549947832 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.112201 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.112780 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.612753371 +0000 UTC m=+142.551212239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.114866 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.137145 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.154677 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.175137 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.194678 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.214306 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.214417 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.214827 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" 
failed. No retries permitted until 2025-09-30 19:49:00.714738352 +0000 UTC m=+142.653197200 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.215949 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.216824 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.716807481 +0000 UTC m=+142.655266329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.234342 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.266731 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.274646 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.296462 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.315201 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.317248 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.317478 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:00.817445565 +0000 UTC m=+142.755904433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.317767 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.319022 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.818985738 +0000 UTC m=+142.757444626 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.335215 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.355269 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.374450 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.395055 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.414974 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.418563 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.418792 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:00.918729867 +0000 UTC m=+142.857188725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.419070 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.419560 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:00.91953892 +0000 UTC m=+142.857997828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.434627 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.455921 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.474983 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.494404 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.514513 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.520218 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.520671 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" Sep 
30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.521041 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.021010377 +0000 UTC m=+142.959469195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.533999 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.553926 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.574210 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.622252 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.622662 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.12264743 +0000 UTC m=+143.061106248 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.635259 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-425j2\" (UniqueName: \"kubernetes.io/projected/3b3a59ee-ec27-4879-9a3b-e7004d4394d9-kube-api-access-425j2\") pod \"openshift-config-operator-7777fb866f-kmwk4\" (UID: \"3b3a59ee-ec27-4879-9a3b-e7004d4394d9\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.650939 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2svcv\" (UniqueName: \"kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv\") pod \"controller-manager-879f6c89f-7wjwr\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.673738 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.694950 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.713975 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.723813 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.723992 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.223957143 +0000 UTC m=+143.162415971 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.724232 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.724576 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.22456177 +0000 UTC m=+143.163020728 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.733664 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.746710 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.754077 4603 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.775628 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.795498 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.814779 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.826005 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.826589 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:01.326567262 +0000 UTC m=+143.265026120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.835381 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.854888 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.876944 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.894304 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.915845 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.932687 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.932833 4603 request.go:700] Waited for 1.835289135s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/registry/token Sep 30 19:49:00 crc kubenswrapper[4603]: E0930 19:49:00.933104 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.433084072 +0000 UTC m=+143.371542920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.950034 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.950501 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.978009 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2729\" (UniqueName: \"kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729\") pod \"oauth-openshift-558db77b4-7pz75\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.996573 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"] Sep 30 19:49:00 crc kubenswrapper[4603]: I0930 19:49:00.997115 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhqtf\" (UniqueName: \"kubernetes.io/projected/c78675e4-742c-4851-b9b9-97c086a35138-kube-api-access-dhqtf\") pod \"machine-approver-56656f9798-fsfn4\" (UID: \"c78675e4-742c-4851-b9b9-97c086a35138\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.015791 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dv8s\" (UniqueName: \"kubernetes.io/projected/90a9f0f4-2f8d-4ce9-98fd-db23877e381f-kube-api-access-8dv8s\") pod \"apiserver-76f77b778f-8jxkn\" (UID: \"90a9f0f4-2f8d-4ce9-98fd-db23877e381f\") " pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.034015 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.034521 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.534496599 +0000 UTC m=+143.472955427 (durationBeforeRetry 500ms). 
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.052479 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7srp8\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-kube-api-access-7srp8\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.057382 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/871dd358-5fc9-4438-857a-193463cc9a9a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hbgcp\" (UID: \"871dd358-5fc9-4438-857a-193463cc9a9a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.083004 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptc25\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.085792 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.092187 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chdg8\" (UniqueName: \"kubernetes.io/projected/007744a9-1794-4ff2-ba1d-f7c6d794f987-kube-api-access-chdg8\") pod \"apiserver-7bbb656c7d-bp267\" (UID: \"007744a9-1794-4ff2-ba1d-f7c6d794f987\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.093800 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.111990 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jphx\" (UniqueName: \"kubernetes.io/projected/79714c45-e39c-431c-b45a-eb244926ced5-kube-api-access-9jphx\") pod \"dns-operator-744455d44c-jv7tl\" (UID: \"79714c45-e39c-431c-b45a-eb244926ced5\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.135617 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.136369 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4mcs\" (UniqueName: \"kubernetes.io/projected/2aa0c946-2015-4a51-b4a6-be84957d1ffa-kube-api-access-z4mcs\") pod \"authentication-operator-69f744f599-g965c\" (UID: \"2aa0c946-2015-4a51-b4a6-be84957d1ffa\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.136430 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.636415029 +0000 UTC m=+143.574873847 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.136509 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.152795 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj4pn\" (UniqueName: \"kubernetes.io/projected/f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4-kube-api-access-gj4pn\") pod \"machine-api-operator-5694c8668f-fgjdh\" (UID: \"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.162946 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.171330 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9a64ba70-8f84-4334-8e7f-df515a31a3c5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5jw9f\" (UID: \"9a64ba70-8f84-4334-8e7f-df515a31a3c5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.177286 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"]
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.187520 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnl9l\" (UniqueName: \"kubernetes.io/projected/a4a6e1f1-d082-4737-83b9-0dbaad5338c5-kube-api-access-rnl9l\") pod \"machine-config-operator-74547568cd-9nfqj\" (UID: \"a4a6e1f1-d082-4737-83b9-0dbaad5338c5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"
Sep 30 19:49:01 crc kubenswrapper[4603]: W0930 19:49:01.190996 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc21bde8a_fbf8_41cc_9ee1_9a0a02305e6c.slice/crio-ae9bb307b4fda4d8ac7ef0703efa9dc9b2bfa4e4fc740439a33e437e0ea91e8d WatchSource:0}: Error finding container ae9bb307b4fda4d8ac7ef0703efa9dc9b2bfa4e4fc740439a33e437e0ea91e8d: Status 404 returned error can't find the container with id ae9bb307b4fda4d8ac7ef0703efa9dc9b2bfa4e4fc740439a33e437e0ea91e8d
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.192648 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.209091 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px95m\" (UniqueName: \"kubernetes.io/projected/ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd-kube-api-access-px95m\") pod \"console-operator-58897d9998-cb8bv\" (UID: \"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd\") " pod="openshift-console-operator/console-operator-58897d9998-cb8bv"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.235665 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.236055 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.237331 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.73731426 +0000 UTC m=+143.675773078 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.239709 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjc6m\" (UniqueName: \"kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.254150 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.262480 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") pod \"route-controller-manager-6576b87f9c-6lk6w\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.263868 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.272879 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.273325 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.287310 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cb8bv"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.299229 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.327549 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl"
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338036 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0c5692-7618-4522-9a4d-e10d7027c791-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338089 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338117 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rkm8\" (UniqueName: \"kubernetes.io/projected/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-kube-api-access-4rkm8\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338133 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjgd7\" (UniqueName: \"kubernetes.io/projected/1c61ac02-3732-4bf3-b488-ac09d35092f4-kube-api-access-cjgd7\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338499 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ng72\" (UniqueName: \"kubernetes.io/projected/7c0c5692-7618-4522-9a4d-e10d7027c791-kube-api-access-5ng72\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338543 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlqjx\" (UniqueName: \"kubernetes.io/projected/82e162e9-571a-4ccf-b6ff-d97b4996757a-kube-api-access-qlqjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338580 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338606 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/0828680f-a2ec-4519-9704-206f7b7feb35-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: \"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338633 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b8e611-0eaf-40d4-8692-2e7cec96aafe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338648 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q6dk\" (UniqueName: \"kubernetes.io/projected/0828680f-a2ec-4519-9704-206f7b7feb35-kube-api-access-5q6dk\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: \"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338667 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c61ac02-3732-4bf3-b488-ac09d35092f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338682 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338712 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-default-certificate\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338728 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-service-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338744 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpdtp\" (UniqueName: \"kubernetes.io/projected/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-kube-api-access-gpdtp\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338760 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-config-volume\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338774 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/05a079f5-4049-4140-af22-5271a97fee7a-tmpfs\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338789 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpzr2\" (UniqueName: \"kubernetes.io/projected/be25c0f8-af76-4675-88c5-2ef8f85c8b64-kube-api-access-kpzr2\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338820 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp556\" (UniqueName: \"kubernetes.io/projected/3406e452-0d64-4882-bbcf-46486cbbb1d1-kube-api-access-qp556\") pod \"migrator-59844c95c7-2tk5z\" (UID: \"3406e452-0d64-4882-bbcf-46486cbbb1d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338835 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-webhook-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338852 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt9cr\" (UniqueName: \"kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7jf9\" (UniqueName: \"kubernetes.io/projected/baa66917-fa78-4b13-9bb9-aab63d5c7095-kube-api-access-n7jf9\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338889 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b79fc526-fe5c-4375-a347-68920eae4794-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338903 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338918 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.338994 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e0d835d7-9572-48dd-b237-cfa225e29d88-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339019 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4qd5\" (UniqueName: \"kubernetes.io/projected/71dd6812-e1e9-4014-a828-261649ee0ac7-kube-api-access-l4qd5\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339036 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339057 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr5sr\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-kube-api-access-fr5sr\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339082 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-serving-cert\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339096 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cad15f61-9169-4a88-adb8-bd63071736d0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339112 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-profile-collector-cert\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339144 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-stats-auth\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339202 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgtvc\" (UniqueName: \"kubernetes.io/projected/722a6d6d-3382-415f-828c-db2fa023bbff-kube-api-access-vgtvc\") pod \"downloads-7954f5f757-2d2ld\" (UID: \"722a6d6d-3382-415f-828c-db2fa023bbff\") " pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339297 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w87ch\" (UniqueName: \"kubernetes.io/projected/e0d835d7-9572-48dd-b237-cfa225e29d88-kube-api-access-w87ch\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339320 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f8579e3d-102f-44ad-befd-fadc7bdf08ae-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339336 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjkvr\" (UniqueName: \"kubernetes.io/projected/cad15f61-9169-4a88-adb8-bd63071736d0-kube-api-access-wjkvr\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339351 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e162e9-571a-4ccf-b6ff-d97b4996757a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339365 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs4gg\" (UniqueName: \"kubernetes.io/projected/2ab2de99-0003-4d85-8cb8-fe347801f9d1-kube-api-access-gs4gg\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339387 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baa66917-fa78-4b13-9bb9-aab63d5c7095-config\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339403 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b8e611-0eaf-40d4-8692-2e7cec96aafe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339418 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8579e3d-102f-44ad-befd-fadc7bdf08ae-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339469 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ab2de99-0003-4d85-8cb8-fe347801f9d1-service-ca-bundle\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339483 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339537 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpbdg\" (UniqueName: \"kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339576 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvdb4\" (UniqueName: \"kubernetes.io/projected/05a079f5-4049-4140-af22-5271a97fee7a-kube-api-access-lvdb4\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339610 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339625 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2e94ed-63a4-4335-8edd-67b592965119-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339640 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339654 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsfgt\" (UniqueName: \"kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339669 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59f7faf2-57d2-4230-b6a8-88c30096e372-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339700 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339726 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-client\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339744 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm4rb\" (UniqueName: \"kubernetes.io/projected/84b8e611-0eaf-40d4-8692-2e7cec96aafe-kube-api-access-tm4rb\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339760 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f7faf2-57d2-4230-b6a8-88c30096e372-config\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339782 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-metrics-certs\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339807 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b79fc526-fe5c-4375-a347-68920eae4794-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339821 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-cabundle\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339837 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f7faf2-57d2-4230-b6a8-88c30096e372-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339877 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad15f61-9169-4a88-adb8-bd63071736d0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.339893 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdvm\" (UniqueName: \"kubernetes.io/projected/0e2e94ed-63a4-4335-8edd-67b592965119-kube-api-access-4vdvm\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.342092 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baa66917-fa78-4b13-9bb9-aab63d5c7095-serving-cert\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.342129 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-config\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.347767 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89jrg\" (UniqueName: \"kubernetes.io/projected/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-kube-api-access-89jrg\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.347956 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.348069 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.348216 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.351104 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c61ac02-3732-4bf3-b488-ac09d35092f4-proxy-tls\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.351547 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.851524748 +0000 UTC m=+143.789983566 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.354914 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-srv-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.354976 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.351848 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.355642 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e162e9-571a-4ccf-b6ff-d97b4996757a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.355674 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-srv-cert\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.355704 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b79fc526-fe5c-4375-a347-68920eae4794-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.355751 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-key\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.355791 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-metrics-tls\") pod 
\"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.356355 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.461735 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462097 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462137 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4qd5\" (UniqueName: \"kubernetes.io/projected/71dd6812-e1e9-4014-a828-261649ee0ac7-kube-api-access-l4qd5\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462156 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr5sr\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-kube-api-access-fr5sr\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462200 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-serving-cert\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462215 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cad15f61-9169-4a88-adb8-bd63071736d0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462233 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-certs\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462248 4603 
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462265 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-profile-collector-cert\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462289 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgtvc\" (UniqueName: \"kubernetes.io/projected/722a6d6d-3382-415f-828c-db2fa023bbff-kube-api-access-vgtvc\") pod \"downloads-7954f5f757-2d2ld\" (UID: \"722a6d6d-3382-415f-828c-db2fa023bbff\") " pod="openshift-console/downloads-7954f5f757-2d2ld"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462308 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-csi-data-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462332 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w87ch\" (UniqueName: \"kubernetes.io/projected/e0d835d7-9572-48dd-b237-cfa225e29d88-kube-api-access-w87ch\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462353 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjkvr\" (UniqueName: \"kubernetes.io/projected/cad15f61-9169-4a88-adb8-bd63071736d0-kube-api-access-wjkvr\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e162e9-571a-4ccf-b6ff-d97b4996757a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462386 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f8579e3d-102f-44ad-befd-fadc7bdf08ae-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462400 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs4gg\" (UniqueName: \"kubernetes.io/projected/2ab2de99-0003-4d85-8cb8-fe347801f9d1-kube-api-access-gs4gg\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462417 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baa66917-fa78-4b13-9bb9-aab63d5c7095-config\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462435 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b8e611-0eaf-40d4-8692-2e7cec96aafe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462462 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8579e3d-102f-44ad-befd-fadc7bdf08ae-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462486 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ab2de99-0003-4d85-8cb8-fe347801f9d1-service-ca-bundle\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462500 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462514 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpbdg\" (UniqueName: \"kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462535 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvdb4\" (UniqueName: \"kubernetes.io/projected/05a079f5-4049-4140-af22-5271a97fee7a-kube-api-access-lvdb4\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462552 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462568 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2e94ed-63a4-4335-8edd-67b592965119-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462591 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462605 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsfgt\" (UniqueName: \"kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462621 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59f7faf2-57d2-4230-b6a8-88c30096e372-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462643 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-client\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462659 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm4rb\" (UniqueName: \"kubernetes.io/projected/84b8e611-0eaf-40d4-8692-2e7cec96aafe-kube-api-access-tm4rb\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462674 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f7faf2-57d2-4230-b6a8-88c30096e372-config\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462689 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-metrics-certs\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") "
pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462705 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzvvw\" (UniqueName: \"kubernetes.io/projected/37afc549-2a93-48cb-85d9-ad284888e2f5-kube-api-access-dzvvw\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462722 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b79fc526-fe5c-4375-a347-68920eae4794-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462736 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-cabundle\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462771 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f7faf2-57d2-4230-b6a8-88c30096e372-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462788 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad15f61-9169-4a88-adb8-bd63071736d0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462802 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdvm\" (UniqueName: \"kubernetes.io/projected/0e2e94ed-63a4-4335-8edd-67b592965119-kube-api-access-4vdvm\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462818 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baa66917-fa78-4b13-9bb9-aab63d5c7095-serving-cert\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462832 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-config\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462859 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89jrg\" (UniqueName: \"kubernetes.io/projected/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-kube-api-access-89jrg\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462875 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462890 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462907 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462938 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c61ac02-3732-4bf3-b488-ac09d35092f4-proxy-tls\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462953 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-srv-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462968 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462984 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e162e9-571a-4ccf-b6ff-d97b4996757a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.462998 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-srv-cert\") pod 
\"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463015 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b79fc526-fe5c-4375-a347-68920eae4794-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463029 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-key\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463044 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-metrics-tls\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463058 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463074 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-registration-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463090 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0c5692-7618-4522-9a4d-e10d7027c791-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463114 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463129 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rkm8\" (UniqueName: \"kubernetes.io/projected/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-kube-api-access-4rkm8\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 
19:49:01.463145 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjgd7\" (UniqueName: \"kubernetes.io/projected/1c61ac02-3732-4bf3-b488-ac09d35092f4-kube-api-access-cjgd7\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463173 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ng72\" (UniqueName: \"kubernetes.io/projected/7c0c5692-7618-4522-9a4d-e10d7027c791-kube-api-access-5ng72\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463198 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlqjx\" (UniqueName: \"kubernetes.io/projected/82e162e9-571a-4ccf-b6ff-d97b4996757a-kube-api-access-qlqjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463214 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/0828680f-a2ec-4519-9704-206f7b7feb35-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: \"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463751 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463779 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-socket-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463798 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-mountpoint-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463814 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b8e611-0eaf-40d4-8692-2e7cec96aafe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: 
I0930 19:49:01.463831 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q6dk\" (UniqueName: \"kubernetes.io/projected/0828680f-a2ec-4519-9704-206f7b7feb35-kube-api-access-5q6dk\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: \"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.463978 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:01.963947815 +0000 UTC m=+143.902406683 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.464083 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f7faf2-57d2-4230-b6a8-88c30096e372-config\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.464890 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baa66917-fa78-4b13-9bb9-aab63d5c7095-config\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.465476 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84b8e611-0eaf-40d4-8692-2e7cec96aafe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.466607 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f8579e3d-102f-44ad-befd-fadc7bdf08ae-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.463847 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dvh7\" (UniqueName: \"kubernetes.io/projected/50ec5665-fef7-43c9-9143-29200698525d-kube-api-access-4dvh7\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.467357 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" 
(UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-plugins-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.467441 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-node-bootstrap-token\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.467527 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c61ac02-3732-4bf3-b488-ac09d35092f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468014 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468118 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50ec5665-fef7-43c9-9143-29200698525d-cert\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468210 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-default-certificate\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468343 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-service-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468428 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpdtp\" (UniqueName: \"kubernetes.io/projected/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-kube-api-access-gpdtp\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468508 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-config-volume\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc 
kubenswrapper[4603]: I0930 19:49:01.468575 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/05a079f5-4049-4140-af22-5271a97fee7a-tmpfs\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468648 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpzr2\" (UniqueName: \"kubernetes.io/projected/be25c0f8-af76-4675-88c5-2ef8f85c8b64-kube-api-access-kpzr2\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.468751 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-webhook-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469471 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp556\" (UniqueName: \"kubernetes.io/projected/3406e452-0d64-4882-bbcf-46486cbbb1d1-kube-api-access-qp556\") pod \"migrator-59844c95c7-2tk5z\" (UID: \"3406e452-0d64-4882-bbcf-46486cbbb1d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469553 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt9cr\" (UniqueName: \"kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469620 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7jf9\" (UniqueName: \"kubernetes.io/projected/baa66917-fa78-4b13-9bb9-aab63d5c7095-kube-api-access-n7jf9\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469690 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b79fc526-fe5c-4375-a347-68920eae4794-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469770 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469844 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469926 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e0d835d7-9572-48dd-b237-cfa225e29d88-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.469992 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll96z\" (UniqueName: \"kubernetes.io/projected/56bb7cee-bac4-4696-8d3d-c89a96d8de65-kube-api-access-ll96z\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.470803 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c61ac02-3732-4bf3-b488-ac09d35092f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.472018 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2ab2de99-0003-4d85-8cb8-fe347801f9d1-service-ca-bundle\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.474774 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-srv-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.475329 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.475796 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-stats-auth\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.475799 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82e162e9-571a-4ccf-b6ff-d97b4996757a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 
19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.490303 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-cabundle\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.491826 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.492967 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c61ac02-3732-4bf3-b488-ac09d35092f4-proxy-tls\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.493101 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.495091 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad15f61-9169-4a88-adb8-bd63071736d0-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.495228 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8579e3d-102f-44ad-befd-fadc7bdf08ae-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.494498 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.495936 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-metrics-certs\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.495943 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82e162e9-571a-4ccf-b6ff-d97b4996757a-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.497211 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c0c5692-7618-4522-9a4d-e10d7027c791-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.497316 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-config-volume\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.497454 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-service-ca\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.497744 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/05a079f5-4049-4140-af22-5271a97fee7a-tmpfs\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.498517 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-config\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.499023 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b79fc526-fe5c-4375-a347-68920eae4794-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.499876 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.500825 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.501200 4603 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cad15f61-9169-4a88-adb8-bd63071736d0-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.501286 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.501439 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-serving-cert\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.501915 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-srv-cert\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.501949 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-profile-collector-cert\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.502226 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.503115 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b79fc526-fe5c-4375-a347-68920eae4794-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.503915 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59f7faf2-57d2-4230-b6a8-88c30096e372-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.510436 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/be25c0f8-af76-4675-88c5-2ef8f85c8b64-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.512606 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2ab2de99-0003-4d85-8cb8-fe347801f9d1-default-certificate\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.513143 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0e2e94ed-63a4-4335-8edd-67b592965119-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.516808 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523457 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-metrics-tls\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523552 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baa66917-fa78-4b13-9bb9-aab63d5c7095-serving-cert\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523677 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" event={"ID":"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c","Type":"ContainerStarted","Data":"768c26ced9482a59e237ae5a9d4617d2344553868f54767869b0f2a18ae0de69"} Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523762 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" event={"ID":"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c","Type":"ContainerStarted","Data":"ae9bb307b4fda4d8ac7ef0703efa9dc9b2bfa4e4fc740439a33e437e0ea91e8d"} Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523789 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-apiservice-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523800 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/0828680f-a2ec-4519-9704-206f7b7feb35-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: 
\"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.523909 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-etcd-client\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.524118 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/05a079f5-4049-4140-af22-5271a97fee7a-webhook-cert\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.524940 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.525503 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" event={"ID":"c78675e4-742c-4851-b9b9-97c086a35138","Type":"ContainerStarted","Data":"fb14f3d61ed740599ad87e1d16410606a90654c42b9f90be1efdbbb0d5849f85"} Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.528640 4603 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7wjwr container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.528686 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.529224 4603 generic.go:334] "Generic (PLEG): container finished" podID="3b3a59ee-ec27-4879-9a3b-e7004d4394d9" containerID="c11d9b4ff9df85554cb920f34ecd0b94efac9895bb5bbe3667b3f7d01469d4e5" exitCode=0 Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.529249 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" event={"ID":"3b3a59ee-ec27-4879-9a3b-e7004d4394d9","Type":"ContainerDied","Data":"c11d9b4ff9df85554cb920f34ecd0b94efac9895bb5bbe3667b3f7d01469d4e5"} Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.529273 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" event={"ID":"3b3a59ee-ec27-4879-9a3b-e7004d4394d9","Type":"ContainerStarted","Data":"10d2ae28da32b5744fd8a1e9bb745d07081cbfb7229a366e28decca0d236cc24"} Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.529589 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.531544 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e0d835d7-9572-48dd-b237-cfa225e29d88-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.531851 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.533474 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/84b8e611-0eaf-40d4-8692-2e7cec96aafe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.533638 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71dd6812-e1e9-4014-a828-261649ee0ac7-signing-key\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.537048 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs4gg\" (UniqueName: \"kubernetes.io/projected/2ab2de99-0003-4d85-8cb8-fe347801f9d1-kube-api-access-gs4gg\") pod \"router-default-5444994796-hswfh\" (UID: \"2ab2de99-0003-4d85-8cb8-fe347801f9d1\") " pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.546361 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr5sr\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-kube-api-access-fr5sr\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.555249 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4qd5\" (UniqueName: \"kubernetes.io/projected/71dd6812-e1e9-4014-a828-261649ee0ac7-kube-api-access-l4qd5\") pod \"service-ca-9c57cc56f-cffbx\" (UID: \"71dd6812-e1e9-4014-a828-261649ee0ac7\") " pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.573376 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.573414 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-dzvvw\" (UniqueName: \"kubernetes.io/projected/37afc549-2a93-48cb-85d9-ad284888e2f5-kube-api-access-dzvvw\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.573475 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-registration-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.575086 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.074219561 +0000 UTC m=+144.012678379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575706 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-registration-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575745 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-socket-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575767 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-mountpoint-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575806 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dvh7\" (UniqueName: \"kubernetes.io/projected/50ec5665-fef7-43c9-9143-29200698525d-kube-api-access-4dvh7\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575871 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-mountpoint-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.578645 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-plugins-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583340 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-socket-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.575821 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-plugins-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583474 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-node-bootstrap-token\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583518 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50ec5665-fef7-43c9-9143-29200698525d-cert\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583606 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll96z\" (UniqueName: \"kubernetes.io/projected/56bb7cee-bac4-4696-8d3d-c89a96d8de65-kube-api-access-ll96z\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583651 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-certs\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583691 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-csi-data-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.583854 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37afc549-2a93-48cb-85d9-ad284888e2f5-csi-data-dir\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.601213 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-certs\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.606383 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50ec5665-fef7-43c9-9143-29200698525d-cert\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.625546 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.629348 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjkvr\" (UniqueName: \"kubernetes.io/projected/cad15f61-9169-4a88-adb8-bd63071736d0-kube-api-access-wjkvr\") pod \"openshift-apiserver-operator-796bbdcf4f-j84b7\" (UID: \"cad15f61-9169-4a88-adb8-bd63071736d0\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.629611 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w87ch\" (UniqueName: \"kubernetes.io/projected/e0d835d7-9572-48dd-b237-cfa225e29d88-kube-api-access-w87ch\") pod \"multus-admission-controller-857f4d67dd-p72j4\" (UID: \"e0d835d7-9572-48dd-b237-cfa225e29d88\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.629907 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/56bb7cee-bac4-4696-8d3d-c89a96d8de65-node-bootstrap-token\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.631557 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b79fc526-fe5c-4375-a347-68920eae4794-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lpr99\" (UID: \"b79fc526-fe5c-4375-a347-68920eae4794\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.658650 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpbdg\" (UniqueName: \"kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg\") pod \"marketplace-operator-79b997595-pq477\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.659086 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvdb4\" (UniqueName: \"kubernetes.io/projected/05a079f5-4049-4140-af22-5271a97fee7a-kube-api-access-lvdb4\") pod \"packageserver-d55dfcdfc-fbk8r\" (UID: \"05a079f5-4049-4140-af22-5271a97fee7a\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.666473 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.683513 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp"] Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.684783 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.685496 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.185481356 +0000 UTC m=+144.123940174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.696973 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"] Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.703105 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rkm8\" (UniqueName: \"kubernetes.io/projected/bb0a48c5-fc0f-4336-880f-72a7965ea0fc-kube-api-access-4rkm8\") pod \"etcd-operator-b45778765-gvrsj\" (UID: \"bb0a48c5-fc0f-4336-880f-72a7965ea0fc\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.712620 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgtvc\" (UniqueName: \"kubernetes.io/projected/722a6d6d-3382-415f-828c-db2fa023bbff-kube-api-access-vgtvc\") pod \"downloads-7954f5f757-2d2ld\" (UID: \"722a6d6d-3382-415f-828c-db2fa023bbff\") " pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.725321 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fgjdh"] Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.728225 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjgd7\" (UniqueName: \"kubernetes.io/projected/1c61ac02-3732-4bf3-b488-ac09d35092f4-kube-api-access-cjgd7\") pod \"machine-config-controller-84d6567774-2ctlk\" (UID: \"1c61ac02-3732-4bf3-b488-ac09d35092f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.749046 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.754232 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.767468 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ng72\" (UniqueName: \"kubernetes.io/projected/7c0c5692-7618-4522-9a4d-e10d7027c791-kube-api-access-5ng72\") pod \"package-server-manager-789f6589d5-x7f6d\" (UID: \"7c0c5692-7618-4522-9a4d-e10d7027c791\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:01 crc kubenswrapper[4603]: W0930 19:49:01.780846 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod871dd358_5fc9_4438_857a_193463cc9a9a.slice/crio-8baafa3baa6e2350f83301f414e4b6a60477ab60620e25524fd2502a640cd742 WatchSource:0}: Error finding container 8baafa3baa6e2350f83301f414e4b6a60477ab60620e25524fd2502a640cd742: Status 404 returned error can't find the container with id 8baafa3baa6e2350f83301f414e4b6a60477ab60620e25524fd2502a640cd742 Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.781441 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlqjx\" (UniqueName: \"kubernetes.io/projected/82e162e9-571a-4ccf-b6ff-d97b4996757a-kube-api-access-qlqjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-wrp88\" (UID: \"82e162e9-571a-4ccf-b6ff-d97b4996757a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.787275 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.787596 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.287584121 +0000 UTC m=+144.226042939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.792684 4603 util.go:30] "No sandbox for pod can be found. 
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.792684 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.798776 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsfgt\" (UniqueName: \"kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt\") pod \"collect-profiles-29321025-fvp2j\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.804076 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.814701 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm4rb\" (UniqueName: \"kubernetes.io/projected/84b8e611-0eaf-40d4-8692-2e7cec96aafe-kube-api-access-tm4rb\") pod \"openshift-controller-manager-operator-756b6f6bc6-4lz6k\" (UID: \"84b8e611-0eaf-40d4-8692-2e7cec96aafe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.832354 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpdtp\" (UniqueName: \"kubernetes.io/projected/52aa0be3-4c90-4f6c-b592-0f81457e3e8c-kube-api-access-gpdtp\") pod \"catalog-operator-68c6474976-n2c2r\" (UID: \"52aa0be3-4c90-4f6c-b592-0f81457e3e8c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.842303 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.849601 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdvm\" (UniqueName: \"kubernetes.io/projected/0e2e94ed-63a4-4335-8edd-67b592965119-kube-api-access-4vdvm\") pod \"control-plane-machine-set-operator-78cbb6b69f-nfzcr\" (UID: \"0e2e94ed-63a4-4335-8edd-67b592965119\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.878829 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f8579e3d-102f-44ad-befd-fadc7bdf08ae-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-69dcb\" (UID: \"f8579e3d-102f-44ad-befd-fadc7bdf08ae\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.880374 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.889057 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.890189 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpzr2\" (UniqueName: \"kubernetes.io/projected/be25c0f8-af76-4675-88c5-2ef8f85c8b64-kube-api-access-kpzr2\") pod \"olm-operator-6b444d44fb-5drd5\" (UID: \"be25c0f8-af76-4675-88c5-2ef8f85c8b64\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"
Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.890279 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.390263973 +0000 UTC m=+144.328722791 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.896552 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.903470 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.909027 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.918537 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.920206 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q6dk\" (UniqueName: \"kubernetes.io/projected/0828680f-a2ec-4519-9704-206f7b7feb35-kube-api-access-5q6dk\") pod \"cluster-samples-operator-665b6dd947-6x6gq\" (UID: \"0828680f-a2ec-4519-9704-206f7b7feb35\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.929085 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pq477"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.932890 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.933736 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89jrg\" (UniqueName: \"kubernetes.io/projected/61e28abf-6deb-4c8b-be7e-9a1d226ebdc2-kube-api-access-89jrg\") pod \"dns-default-8s2lm\" (UID: \"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2\") " pod="openshift-dns/dns-default-8s2lm"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.943568 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.950266 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.957645 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp556\" (UniqueName: \"kubernetes.io/projected/3406e452-0d64-4882-bbcf-46486cbbb1d1-kube-api-access-qp556\") pod \"migrator-59844c95c7-2tk5z\" (UID: \"3406e452-0d64-4882-bbcf-46486cbbb1d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.974142 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt9cr\" (UniqueName: \"kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr\") pod \"console-f9d7485db-7gjfv\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " pod="openshift-console/console-f9d7485db-7gjfv"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.978596 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.986394 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-8s2lm"
Sep 30 19:49:01 crc kubenswrapper[4603]: I0930 19:49:01.991972 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:01 crc kubenswrapper[4603]: E0930 19:49:01.992323 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.492301567 +0000 UTC m=+144.430760385 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.000293 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59f7faf2-57d2-4230-b6a8-88c30096e372-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nk79f\" (UID: \"59f7faf2-57d2-4230-b6a8-88c30096e372\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.013497 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7jf9\" (UniqueName: \"kubernetes.io/projected/baa66917-fa78-4b13-9bb9-aab63d5c7095-kube-api-access-n7jf9\") pod \"service-ca-operator-777779d784-jq4d9\" (UID: \"baa66917-fa78-4b13-9bb9-aab63d5c7095\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.025284 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.056993 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dvh7\" (UniqueName: \"kubernetes.io/projected/50ec5665-fef7-43c9-9143-29200698525d-kube-api-access-4dvh7\") pod \"ingress-canary-89sx8\" (UID: \"50ec5665-fef7-43c9-9143-29200698525d\") " pod="openshift-ingress-canary/ingress-canary-89sx8"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.058401 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzvvw\" (UniqueName: \"kubernetes.io/projected/37afc549-2a93-48cb-85d9-ad284888e2f5-kube-api-access-dzvvw\") pod \"csi-hostpathplugin-5lmct\" (UID: \"37afc549-2a93-48cb-85d9-ad284888e2f5\") " pod="hostpath-provisioner/csi-hostpathplugin-5lmct"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.075940 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7gjfv"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.077346 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll96z\" (UniqueName: \"kubernetes.io/projected/56bb7cee-bac4-4696-8d3d-c89a96d8de65-kube-api-access-ll96z\") pod \"machine-config-server-z2tgs\" (UID: \"56bb7cee-bac4-4696-8d3d-c89a96d8de65\") " pod="openshift-machine-config-operator/machine-config-server-z2tgs"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.087554 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.096393 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.098298 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.098891 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.598868338 +0000 UTC m=+144.537327156 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.098922 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.102603 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-g965c"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.108141 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8jxkn"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.149280 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.157626 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.199814 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.200232 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.700216242 +0000 UTC m=+144.638675060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: W0930 19:49:02.207413 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2aa0c946_2015_4a51_b4a6_be84957d1ffa.slice/crio-effb7cf8c2e199c195c0639cfd3566322ff3dee31daae59612d288ca07372ab8 WatchSource:0}: Error finding container effb7cf8c2e199c195c0639cfd3566322ff3dee31daae59612d288ca07372ab8: Status 404 returned error can't find the container with id effb7cf8c2e199c195c0639cfd3566322ff3dee31daae59612d288ca07372ab8
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.265368 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cb8bv"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.266006 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.271885 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-cffbx"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.306319 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.306809 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.307108 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.807095033 +0000 UTC m=+144.745553851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.307447 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv7tl"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.310632 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5lmct"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.318109 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-89sx8"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.323733 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-z2tgs"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.333892 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"]
Sep 30 19:49:02 crc kubenswrapper[4603]: W0930 19:49:02.384786 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce0e78ed_af1c_4419_b6a2_bb1ba7f54afd.slice/crio-77a57c97ba05aed159778a5485698d9fdf97a85a2f68a0be86325b1f813438d8 WatchSource:0}: Error finding container 77a57c97ba05aed159778a5485698d9fdf97a85a2f68a0be86325b1f813438d8: Status 404 returned error can't find the container with id 77a57c97ba05aed159778a5485698d9fdf97a85a2f68a0be86325b1f813438d8
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.407833 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.408528 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:02.908516969 +0000 UTC m=+144.846975787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.420276 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.449832 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gvrsj"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.449897 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.449910 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2d2ld"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.495533 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.508602 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.509006 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.008990728 +0000 UTC m=+144.947449546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.563892 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" event={"ID":"bb0a48c5-fc0f-4336-880f-72a7965ea0fc","Type":"ContainerStarted","Data":"04dc3a8238538d0c7cba8362632bf82a0837cfdb0557fe521fe30df3280bfe2b"}
Sep 30 19:49:02 crc kubenswrapper[4603]: W0930 19:49:02.574716 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod722a6d6d_3382_415f_828c_db2fa023bbff.slice/crio-4da65b4e5c49118dbede7f195f624afcdf43173fbbb8c775bbd7040595706e7e WatchSource:0}: Error finding container 4da65b4e5c49118dbede7f195f624afcdf43173fbbb8c775bbd7040595706e7e: Status 404 returned error can't find the container with id 4da65b4e5c49118dbede7f195f624afcdf43173fbbb8c775bbd7040595706e7e
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.574885 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" event={"ID":"71dd6812-e1e9-4014-a828-261649ee0ac7","Type":"ContainerStarted","Data":"f04961bce3720475055ee3e54560558141d18f8e526002f93f2e264c42b1570d"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.609354 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"]
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.611148 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.618041 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" event={"ID":"c78675e4-742c-4851-b9b9-97c086a35138","Type":"ContainerStarted","Data":"a3514db8be0f03e2a4d2467afc272207929831df08f1b4e1974d1111b67d3345"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.618087 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" event={"ID":"c78675e4-742c-4851-b9b9-97c086a35138","Type":"ContainerStarted","Data":"dc5dc4b4233518dae4e369582fbe7870677f6c0fb35c6812e8db1367b0df64c9"}
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.620577 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.120560411 +0000 UTC m=+145.059019229 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.625306 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" event={"ID":"3b3a59ee-ec27-4879-9a3b-e7004d4394d9","Type":"ContainerStarted","Data":"cdc468f0511d4456695a50af493c1406aff2368494da77f4761435fc51c2bf48"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.625891 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.645610 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" event={"ID":"9a64ba70-8f84-4334-8e7f-df515a31a3c5","Type":"ContainerStarted","Data":"8c561199d1dc27514afc966a5dd249ae43e15fd04cac491410e845eba5cb7b92"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.659421 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" event={"ID":"90a9f0f4-2f8d-4ce9-98fd-db23877e381f","Type":"ContainerStarted","Data":"7d9f5f677e3541905ed2f74dab2a492bc19bc3d84c19c4f1f35e5f3af3493ddb"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.661962 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" event={"ID":"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd","Type":"ContainerStarted","Data":"77a57c97ba05aed159778a5485698d9fdf97a85a2f68a0be86325b1f813438d8"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.663540 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" event={"ID":"c7503d65-f97f-45a5-94ec-9f210ea705c9","Type":"ContainerStarted","Data":"c5c5b1c7fb2fae907f8cecf30209fb8c119f5048b334e056ce1fbbc4cc07bd77"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.668316 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" event={"ID":"a4a6e1f1-d082-4737-83b9-0dbaad5338c5","Type":"ContainerStarted","Data":"080535840c2d1d57c4cf0035da6537905a4049920b3b353535c7e7331eaface2"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.672326 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" event={"ID":"871dd358-5fc9-4438-857a-193463cc9a9a","Type":"ContainerStarted","Data":"90b8285094fdf1090fdb693efe9c33605f1a8db3d2bf0355ef625808d9c3296e"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.672371 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" event={"ID":"871dd358-5fc9-4438-857a-193463cc9a9a","Type":"ContainerStarted","Data":"8baafa3baa6e2350f83301f414e4b6a60477ab60620e25524fd2502a640cd742"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.687872 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" event={"ID":"007744a9-1794-4ff2-ba1d-f7c6d794f987","Type":"ContainerStarted","Data":"4ab453827f6dbc06930c908f2f40cb07bf1dfb69866f7840c2d250a872e269a5"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.695467 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" event={"ID":"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4","Type":"ContainerStarted","Data":"649501429d8757f094ec95bc5d7d823d9d0962802709070135934acf0141b599"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.695524 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" event={"ID":"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4","Type":"ContainerStarted","Data":"1b264cfa4412b74d0b572642bfc3cb5dd4eceed8e0f4f8016f10bd51e248cf12"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.699653 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hswfh" event={"ID":"2ab2de99-0003-4d85-8cb8-fe347801f9d1","Type":"ContainerStarted","Data":"63c8bd891839ad0abed079d52708625ffc5a28b430326043c1ae31de2ea39f1f"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.699737 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hswfh" event={"ID":"2ab2de99-0003-4d85-8cb8-fe347801f9d1","Type":"ContainerStarted","Data":"f14ecddbd0db658e9a0a5cbda1b1d6dcd0075803e16b76d09149f9813e6d81db"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.703743 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" event={"ID":"cfa60764-186a-4584-89f8-bb7df4bd2831","Type":"ContainerStarted","Data":"0e15c170c248f3c7017f727819dc342081d11358b17504a72c2835b169c05025"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.704283 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.706617 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" event={"ID":"2aa0c946-2015-4a51-b4a6-be84957d1ffa","Type":"ContainerStarted","Data":"effb7cf8c2e199c195c0639cfd3566322ff3dee31daae59612d288ca07372ab8"}
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.712180 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.712349 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.212297354 +0000 UTC m=+145.150756172 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.712486 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.713742 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.213725294 +0000 UTC m=+145.152184112 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.745891 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr"
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.745970 4603 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7pz75 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body=
Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.745998 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused"
Sep 30 19:49:02 crc kubenswrapper[4603]: W0930 19:49:02.784228 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe25c0f8_af76_4675_88c5_2ef8f85c8b64.slice/crio-5abf0b05ce30e9f4db31968184ae351f408139ae52fa02768b8021e964ad893f WatchSource:0}: Error finding container 5abf0b05ce30e9f4db31968184ae351f408139ae52fa02768b8021e964ad893f: Status 404 returned error can't find the container with id 5abf0b05ce30e9f4db31968184ae351f408139ae52fa02768b8021e964ad893f
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.815153 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.31513987 +0000 UTC m=+145.253598688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.901206 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb"] Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.917123 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:02 crc kubenswrapper[4603]: E0930 19:49:02.917502 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.417484632 +0000 UTC m=+145.355943450 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.942217 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"] Sep 30 19:49:02 crc kubenswrapper[4603]: I0930 19:49:02.946658 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p72j4"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.027224 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.027841 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.52782093 +0000 UTC m=+145.466279758 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.130811 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.131159 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.63114365 +0000 UTC m=+145.569602468 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.143337 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.159570 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.231897 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.232816 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.732799093 +0000 UTC m=+145.671257911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.338695 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.339024 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.839012434 +0000 UTC m=+145.777471252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.445587 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.445922 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:03.945907205 +0000 UTC m=+145.884366013 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.468126 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" podStartSLOduration=124.468110733 podStartE2EDuration="2m4.468110733s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.464206103 +0000 UTC m=+145.402664921" watchObservedRunningTime="2025-09-30 19:49:03.468110733 +0000 UTC m=+145.406569551" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.555449 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.555834 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.055818932 +0000 UTC m=+145.994277740 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.558039 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.593660 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.622971 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-hswfh" podStartSLOduration=123.622953779 podStartE2EDuration="2m3.622953779s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.601031289 +0000 UTC m=+145.539490107" watchObservedRunningTime="2025-09-30 19:49:03.622953779 +0000 UTC m=+145.561412597" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.623957 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.626271 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.626829 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" podStartSLOduration=124.626819708 podStartE2EDuration="2m4.626819708s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.626746216 +0000 UTC m=+145.565205034" watchObservedRunningTime="2025-09-30 19:49:03.626819708 +0000 UTC m=+145.565278526" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.647544 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:03 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:03 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:03 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.647581 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.660566 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.660901 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.16088505 +0000 UTC m=+146.099343868 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.715886 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fsfn4" podStartSLOduration=125.715868885 podStartE2EDuration="2m5.715868885s" podCreationTimestamp="2025-09-30 19:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.674842885 +0000 UTC m=+145.613301703" watchObservedRunningTime="2025-09-30 19:49:03.715868885 +0000 UTC m=+145.654327703" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.724265 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.728426 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-8s2lm"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.760501 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" podStartSLOduration=124.760484576 podStartE2EDuration="2m4.760484576s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.753835177 +0000 UTC m=+145.692293995" watchObservedRunningTime="2025-09-30 19:49:03.760484576 +0000 UTC m=+145.698943384" Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.762821 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.763196 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.263185572 +0000 UTC m=+146.201644380 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.830956 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"] Sep 30 19:49:03 crc kubenswrapper[4603]: W0930 19:49:03.835219 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c61ac02_3732_4bf3_b488_ac09d35092f4.slice/crio-d229bf44fa06ff52597776787b54fdf0ceea6a9a1e21738d1b12c8c51e203f11 WatchSource:0}: Error finding container d229bf44fa06ff52597776787b54fdf0ceea6a9a1e21738d1b12c8c51e203f11: Status 404 returned error can't find the container with id d229bf44fa06ff52597776787b54fdf0ceea6a9a1e21738d1b12c8c51e203f11 Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.863845 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.864373 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.364359831 +0000 UTC m=+146.302818649 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.870687 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" event={"ID":"cfa60764-186a-4584-89f8-bb7df4bd2831","Type":"ContainerStarted","Data":"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f"} Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.933756 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9"] Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.949295 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" event={"ID":"71dd6812-e1e9-4014-a828-261649ee0ac7","Type":"ContainerStarted","Data":"a0edd03dbea34aae2989228d39f71c0f1c40aade45580b71b0d11502108e64ed"} Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.966833 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:03 crc kubenswrapper[4603]: E0930 19:49:03.970603 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.470588013 +0000 UTC m=+146.409046831 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:03 crc kubenswrapper[4603]: W0930 19:49:03.985879 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61e28abf_6deb_4c8b_be7e_9a1d226ebdc2.slice/crio-88b9f75de02df4d9ad091eeab3cd6fb1b2d352604809761f5a299e3032b87cd8 WatchSource:0}: Error finding container 88b9f75de02df4d9ad091eeab3cd6fb1b2d352604809761f5a299e3032b87cd8: Status 404 returned error can't find the container with id 88b9f75de02df4d9ad091eeab3cd6fb1b2d352604809761f5a299e3032b87cd8 Sep 30 19:49:03 crc kubenswrapper[4603]: I0930 19:49:03.993548 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-cffbx" podStartSLOduration=123.993528431 podStartE2EDuration="2m3.993528431s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:03.976365656 +0000 UTC m=+145.914824474" watchObservedRunningTime="2025-09-30 19:49:03.993528431 +0000 UTC m=+145.931987249" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.032297 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" event={"ID":"2aa0c946-2015-4a51-b4a6-be84957d1ffa","Type":"ContainerStarted","Data":"04deb285b06e6d906b1416cc672ef9c06c5e3751ee08ded2513cf483cd1ecd48"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.073623 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.074660 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.574646794 +0000 UTC m=+146.513105612 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.110111 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" event={"ID":"a4a6e1f1-d082-4737-83b9-0dbaad5338c5","Type":"ContainerStarted","Data":"ad2b9b62d55ab611a1d619e12884ae0762c3f061fbd925c9cd570f64a238d661"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.116544 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.136926 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.137529 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-g965c" podStartSLOduration=125.1375152 podStartE2EDuration="2m5.1375152s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.134722911 +0000 UTC m=+146.073181729" watchObservedRunningTime="2025-09-30 19:49:04.1375152 +0000 UTC m=+146.075974018" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.147353 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.147394 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.178090 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.179599 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.679571738 +0000 UTC m=+146.618030546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.180637 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" event={"ID":"f8579e3d-102f-44ad-befd-fadc7bdf08ae","Type":"ContainerStarted","Data":"4f39fbc2455ea37f478c8f46644af376efe84f83e341128fa888db605ebbbc00"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.220391 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" event={"ID":"4db563e4-c2e0-43d9-bc1c-0961bca8cf09","Type":"ContainerStarted","Data":"83e4f8409ff19461e303b8c6d634a6c07163d3fb325e97d691d4332d69b690d0"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.229655 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" event={"ID":"cad15f61-9169-4a88-adb8-bd63071736d0","Type":"ContainerStarted","Data":"a98d2048e8b761f30cfe00bf71212fcb64353af97ee2a70c29ccb45788039d1a"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.255454 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5lmct"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.255493 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-z2tgs" event={"ID":"56bb7cee-bac4-4696-8d3d-c89a96d8de65","Type":"ContainerStarted","Data":"fdc18f67df9ff9e770f868aab0844a8a6777916a5df6281ddfa577c24dcbc196"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.278916 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.280220 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.780204892 +0000 UTC m=+146.718663710 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.286242 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" event={"ID":"f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4","Type":"ContainerStarted","Data":"d44511952b16006db9a14826f4a21c33637d6ae70b563b39105053e268b3e97e"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.288265 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" event={"ID":"be25c0f8-af76-4675-88c5-2ef8f85c8b64","Type":"ContainerStarted","Data":"5abf0b05ce30e9f4db31968184ae351f408139ae52fa02768b8021e964ad893f"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.299535 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" podStartSLOduration=125.299519488 podStartE2EDuration="2m5.299519488s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.289328761 +0000 UTC m=+146.227787579" watchObservedRunningTime="2025-09-30 19:49:04.299519488 +0000 UTC m=+146.237978306" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.301939 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-89sx8"] Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.305741 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.307787 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" event={"ID":"b79fc526-fe5c-4375-a347-68920eae4794","Type":"ContainerStarted","Data":"696be7c16b340dd41f8c5353263c244a9ad79d6f6f2980aef61251436ac8fd85"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.332071 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-z2tgs" podStartSLOduration=6.332055268 podStartE2EDuration="6.332055268s" podCreationTimestamp="2025-09-30 19:48:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.331543534 +0000 UTC m=+146.270002342" watchObservedRunningTime="2025-09-30 19:49:04.332055268 +0000 UTC m=+146.270514086" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.333483 4603 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6lk6w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.333547 4603 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.337391 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" event={"ID":"e0d835d7-9572-48dd-b237-cfa225e29d88","Type":"ContainerStarted","Data":"aaadbb23a5df8d35e98ebb0326d982e180535cac041b60dffbeb2e8a276221fc"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.360310 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" event={"ID":"82e162e9-571a-4ccf-b6ff-d97b4996757a","Type":"ContainerStarted","Data":"d1ad95157b237c4ffeb7119039e827e38a833f5b165db55556256e67f8c2b268"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.372689 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2d2ld" event={"ID":"722a6d6d-3382-415f-828c-db2fa023bbff","Type":"ContainerStarted","Data":"4da65b4e5c49118dbede7f195f624afcdf43173fbbb8c775bbd7040595706e7e"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.387841 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.389747 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.889733368 +0000 UTC m=+146.828192186 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.398915 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" event={"ID":"79714c45-e39c-431c-b45a-eb244926ced5","Type":"ContainerStarted","Data":"5d64f2133d7c62330ed09ee407f6fb00f31ba0dcf412417439b057e5151b98dc"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.402178 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.403848 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" event={"ID":"871dd358-5fc9-4438-857a-193463cc9a9a","Type":"ContainerStarted","Data":"a94f74fe2e7c6406e7a420be1bc286f1586809ba359cad40da8c5674d387a7ca"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.406669 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" event={"ID":"05a079f5-4049-4140-af22-5271a97fee7a","Type":"ContainerStarted","Data":"84e9a6973c133d25717405df2f429d40c9fe14a5a6ffa3a05ed1f4db6f201527"} Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.433519 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fgjdh" podStartSLOduration=124.433505315 podStartE2EDuration="2m4.433505315s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.37458602 +0000 UTC m=+146.313044848" watchObservedRunningTime="2025-09-30 19:49:04.433505315 +0000 UTC m=+146.371964123" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.434914 4603 patch_prober.go:28] interesting pod/console-operator-58897d9998-cb8bv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.434953 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" podUID="ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.489014 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.490454 4603 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:04.990438844 +0000 UTC m=+146.928897662 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.496006 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.498499 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" podStartSLOduration=124.498483571 podStartE2EDuration="2m4.498483571s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.434079241 +0000 UTC m=+146.372538059" watchObservedRunningTime="2025-09-30 19:49:04.498483571 +0000 UTC m=+146.436942379" Sep 30 19:49:04 crc kubenswrapper[4603]: W0930 19:49:04.517138 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50ec5665_fef7_43c9_9143_29200698525d.slice/crio-444944613dbd0950d1d316b7142eb8f2868590ba6957b22ee1882f7ec66b843c WatchSource:0}: Error finding container 444944613dbd0950d1d316b7142eb8f2868590ba6957b22ee1882f7ec66b843c: Status 404 returned error can't find the container with id 444944613dbd0950d1d316b7142eb8f2868590ba6957b22ee1882f7ec66b843c Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.535924 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" podStartSLOduration=124.535908709 podStartE2EDuration="2m4.535908709s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.500309053 +0000 UTC m=+146.438767871" watchObservedRunningTime="2025-09-30 19:49:04.535908709 +0000 UTC m=+146.474367527" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.537057 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" podStartSLOduration=124.537052621 podStartE2EDuration="2m4.537052621s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.536012282 +0000 UTC m=+146.474471100" watchObservedRunningTime="2025-09-30 19:49:04.537052621 +0000 UTC m=+146.475511439" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.592514 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.592878 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.092864479 +0000 UTC m=+147.031323297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.608150 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" podStartSLOduration=125.60813114 podStartE2EDuration="2m5.60813114s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.565249608 +0000 UTC m=+146.503708426" watchObservedRunningTime="2025-09-30 19:49:04.60813114 +0000 UTC m=+146.546589958" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.632302 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:04 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:04 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:04 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.632352 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.693358 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.693646 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.193621766 +0000 UTC m=+147.132080584 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.808427 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.808755 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.308739489 +0000 UTC m=+147.247198297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.872022 4603 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7pz75 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.872077 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 19:49:04 crc kubenswrapper[4603]: I0930 19:49:04.909888 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:04 crc kubenswrapper[4603]: E0930 19:49:04.910524 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.410508205 +0000 UTC m=+147.348967023 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.016267 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.016734 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.516716696 +0000 UTC m=+147.455175514 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.117424 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.118048 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.618031119 +0000 UTC m=+147.556489937 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.219893 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.220272 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.720253719 +0000 UTC m=+147.658712537 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.320732 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.322537 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.822520478 +0000 UTC m=+147.760979296 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.427024 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.427479 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:05.927464015 +0000 UTC m=+147.865922833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.458398 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2d2ld" event={"ID":"722a6d6d-3382-415f-828c-db2fa023bbff","Type":"ContainerStarted","Data":"f0a7f3ea1ed463019906339d5b22bc5eacb36f544ef84432301c6b895fd3e903"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.459408 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.469294 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.469348 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.506006 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" event={"ID":"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6","Type":"ContainerStarted","Data":"a8b3670ecb7f7de233d126fec13661d1775d7b985d4b5fb2927839878e6c78ac"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.517469 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hbgcp" podStartSLOduration=125.517450577 podStartE2EDuration="2m5.517450577s" 
podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:04.650859328 +0000 UTC m=+146.589318146" watchObservedRunningTime="2025-09-30 19:49:05.517450577 +0000 UTC m=+147.455909395" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.528276 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.529260 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.029245921 +0000 UTC m=+147.967704739 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.533854 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" event={"ID":"37afc549-2a93-48cb-85d9-ad284888e2f5","Type":"ContainerStarted","Data":"41d57b61a6696387d6eebdd263cbbc0dcaa1f5703b93d27d9a5ad8216c70fd67"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.561142 4603 generic.go:334] "Generic (PLEG): container finished" podID="90a9f0f4-2f8d-4ce9-98fd-db23877e381f" containerID="2949bf083e750f01320e1afc88cfdb4a75551e82af673eee36e17ae60c2fd969" exitCode=0 Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.561585 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" event={"ID":"90a9f0f4-2f8d-4ce9-98fd-db23877e381f","Type":"ContainerDied","Data":"2949bf083e750f01320e1afc88cfdb4a75551e82af673eee36e17ae60c2fd969"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.566154 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" event={"ID":"e0d835d7-9572-48dd-b237-cfa225e29d88","Type":"ContainerStarted","Data":"b748bada9742507f39f5bcb2d48a8e8c4c2eb25036ac6a33400f2da67c2d4360"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.574248 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8s2lm" event={"ID":"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2","Type":"ContainerStarted","Data":"88b9f75de02df4d9ad091eeab3cd6fb1b2d352604809761f5a299e3032b87cd8"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.594036 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" event={"ID":"3406e452-0d64-4882-bbcf-46486cbbb1d1","Type":"ContainerStarted","Data":"752d78ff3cab7d422663ed9211000561acf64ce90d8212afaac1c23f0d8ddf1a"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.623504 4603 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" event={"ID":"05a079f5-4049-4140-af22-5271a97fee7a","Type":"ContainerStarted","Data":"5e6e60c1e971659c528f3287bfbb48c1bf0fe5145dc2de14e3cab6f85caca951"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.625220 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.629416 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.631124 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.131111249 +0000 UTC m=+148.069570067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.635361 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:05 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:05 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:05 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.635430 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.642321 4603 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fbk8r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" start-of-body= Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.642371 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" podUID="05a079f5-4049-4140-af22-5271a97fee7a" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.644709 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" 
event={"ID":"baa66917-fa78-4b13-9bb9-aab63d5c7095","Type":"ContainerStarted","Data":"8de3aef9a1fc297fc2861302692c49bbeff9b7a48d3fa98ef8f3f1ea66807b7b"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.644812 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" event={"ID":"baa66917-fa78-4b13-9bb9-aab63d5c7095","Type":"ContainerStarted","Data":"6f89129ca1e1291e253899c89987a122a35396975b99976c5f82309999a8064d"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.672432 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" event={"ID":"79714c45-e39c-431c-b45a-eb244926ced5","Type":"ContainerStarted","Data":"a4ed917327d28304f74bcecd2d17c07c18b6bb1673b5f4bce71b199b06f27d55"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.691356 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" event={"ID":"7c0c5692-7618-4522-9a4d-e10d7027c791","Type":"ContainerStarted","Data":"8acc410fb8730b6192c21b4b9a031831f6b0ab7220a9dc55867a9033ca7e7adc"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.693154 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2d2ld" podStartSLOduration=126.693143793 podStartE2EDuration="2m6.693143793s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:05.516592773 +0000 UTC m=+147.455051591" watchObservedRunningTime="2025-09-30 19:49:05.693143793 +0000 UTC m=+147.631602601" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.702550 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" event={"ID":"c7503d65-f97f-45a5-94ec-9f210ea705c9","Type":"ContainerStarted","Data":"b5f9f88d869a740a7cb713958f64d042e43dee7af52e21cd99b639c3bfe818ed"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.703739 4603 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6lk6w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.703777 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.730581 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.731716 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.231696552 +0000 UTC m=+148.170155370 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.735933 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" event={"ID":"b79fc526-fe5c-4375-a347-68920eae4794","Type":"ContainerStarted","Data":"83817e8e9fa2a36b2c7b61050b3a195a5b3244ebc74facd0bdfc5eb47c689e1b"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.768623 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" event={"ID":"bb0a48c5-fc0f-4336-880f-72a7965ea0fc","Type":"ContainerStarted","Data":"13622892ae5134795cad41a3592fb273efd6808555138995068799e3f8a848fa"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.771565 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jq4d9" podStartSLOduration=125.771548709 podStartE2EDuration="2m5.771548709s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:05.766253839 +0000 UTC m=+147.704712657" watchObservedRunningTime="2025-09-30 19:49:05.771548709 +0000 UTC m=+147.710007517" Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.807384 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-89sx8" event={"ID":"50ec5665-fef7-43c9-9143-29200698525d","Type":"ContainerStarted","Data":"444944613dbd0950d1d316b7142eb8f2868590ba6957b22ee1882f7ec66b843c"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.808973 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5jw9f" event={"ID":"9a64ba70-8f84-4334-8e7f-df515a31a3c5","Type":"ContainerStarted","Data":"edce018e8805a8f6790911f9fe3294fd488754568977029ab0e5aa83ca848411"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.810789 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" event={"ID":"52aa0be3-4c90-4f6c-b592-0f81457e3e8c","Type":"ContainerStarted","Data":"c28c9350098e9a2e8c7a68e491b24cb81552e211507160d69e9a587061aae71f"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.810829 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" event={"ID":"52aa0be3-4c90-4f6c-b592-0f81457e3e8c","Type":"ContainerStarted","Data":"fd9e62be277259cec041d58c2c4b40df6f86b0654e4e3f98129b00ff4ca47ec5"} Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.811575 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" Sep 30 19:49:05 crc kubenswrapper[4603]: 
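[editor's note] The MountVolume.MountDevice and UnmountVolume.TearDown failures above all share one root cause: the kubelet's node-level registry of CSI plugins does not yet contain kubevirt.io.hostpath-provisioner, so every attach and detach for this PVC is parked for a retry. Registered node plugins are mirrored into the CSINode API object, so one way to watch for the driver coming up is to poll that object. Below is a minimal client-go sketch, assuming cluster access and a node named crc; the flag names and the program itself are illustrative, not part of the kubelet.

// csidrivercheck is a sketch (not kubelet code) that verifies whether a CSI
// driver such as kubevirt.io.hostpath-provisioner has registered with a node.
package main

import (
	"context"
	"flag"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	kubeconfig := flag.String("kubeconfig", "", "path to kubeconfig")
	node := flag.String("node", "crc", "node name to inspect")
	driver := flag.String("driver", "kubevirt.io.hostpath-provisioner", "CSI driver name")
	flag.Parse()

	cfg, err := clientcmd.BuildConfigFromFlags("", *kubeconfig)
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// The CSINode object mirrors the kubelet's per-node plugin registry:
	// a driver appears in .spec.drivers only after its node plugin has
	// registered over the kubelet's plugin-registration socket.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), *node, metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		if d.Name == *driver {
			fmt.Printf("driver %s is registered on node %s\n", d.Name, *node)
			return
		}
	}
	fmt.Printf("driver %s not yet registered on node %s (mounts will keep retrying)\n", *driver, *node)
}

Once the csi-hostpathplugin pod registers the driver (it only starts later in this log), the pending volume operations succeed on their next retry.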
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.812898 4603 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-n2c2r container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body=
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.812924 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" podUID="52aa0be3-4c90-4f6c-b592-0f81457e3e8c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused"
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.840460 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wrp88" event={"ID":"82e162e9-571a-4ccf-b6ff-d97b4996757a","Type":"ContainerStarted","Data":"22fa0bbd8f83e69be53a7235822407792c76e0bc6c508ea5b7123111223f5672"}
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.841506 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.843649 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.343637265 +0000 UTC m=+148.282096083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.891371 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" event={"ID":"59f7faf2-57d2-4230-b6a8-88c30096e372","Type":"ContainerStarted","Data":"2e4094dff8d594d1d69f7bc9d7525cad095c01b9562d996b1f49db9c27c5a882"}
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.899686 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-89sx8" podStartSLOduration=7.899671159 podStartE2EDuration="7.899671159s" podCreationTimestamp="2025-09-30 19:48:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:05.898691321 +0000 UTC m=+147.837150139" watchObservedRunningTime="2025-09-30 19:49:05.899671159 +0000 UTC m=+147.838129977"
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.900127 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" podStartSLOduration=125.900123502 podStartE2EDuration="2m5.900123502s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:05.840755644 +0000 UTC m=+147.779214462" watchObservedRunningTime="2025-09-30 19:49:05.900123502 +0000 UTC m=+147.838582320"
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.908725 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" event={"ID":"84b8e611-0eaf-40d4-8692-2e7cec96aafe","Type":"ContainerStarted","Data":"c4e1d4de81839b79d42533bd95bee0a189f8274fad65d0e4844b9c8b8b597050"}
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.908764 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" event={"ID":"84b8e611-0eaf-40d4-8692-2e7cec96aafe","Type":"ContainerStarted","Data":"9a73652c38168f4f676010e2d2c7179751330ce2cdee6c8a2bf6aae04fd5564c"}
Sep 30 19:49:05 crc kubenswrapper[4603]: I0930 19:49:05.943852 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:05 crc kubenswrapper[4603]: E0930 19:49:05.944919 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.444895796 +0000 UTC m=+148.383354614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.035619 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-gvrsj" podStartSLOduration=127.035605911 podStartE2EDuration="2m7.035605911s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:05.954982982 +0000 UTC m=+147.893441800" watchObservedRunningTime="2025-09-30 19:49:06.035605911 +0000 UTC m=+147.974064719"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.035930 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lpr99" podStartSLOduration=126.035914239 podStartE2EDuration="2m6.035914239s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.033589163 +0000 UTC m=+147.972047981" watchObservedRunningTime="2025-09-30 19:49:06.035914239 +0000 UTC m=+147.974373057"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.054353 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.055444 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.555432731 +0000 UTC m=+148.493891549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.058654 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" event={"ID":"ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd","Type":"ContainerStarted","Data":"ea19c4b6eaa7d9bfad9ca7b8243e109473e4b6bbfa4eda2748407946a9eb48d4"}
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.087701 4603 patch_prober.go:28] interesting pod/console-operator-58897d9998-cb8bv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body=
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.087950 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" podUID="ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.092707 4603 generic.go:334] "Generic (PLEG): container finished" podID="007744a9-1794-4ff2-ba1d-f7c6d794f987" containerID="18584a33d87da48165bca0f7da936aa7cd848f5487786a5014b7e8fbb88b36e1" exitCode=0
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.092764 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" event={"ID":"007744a9-1794-4ff2-ba1d-f7c6d794f987","Type":"ContainerDied","Data":"18584a33d87da48165bca0f7da936aa7cd848f5487786a5014b7e8fbb88b36e1"}
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.107127 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r" podStartSLOduration=126.107107631 podStartE2EDuration="2m6.107107631s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.106624517 +0000 UTC m=+148.045083335" watchObservedRunningTime="2025-09-30 19:49:06.107107631 +0000 UTC m=+148.045566449"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.125454 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" event={"ID":"f8579e3d-102f-44ad-befd-fadc7bdf08ae","Type":"ContainerStarted","Data":"21968b8502b2039f51013b469e32d75f83014334bcdc422c9c4121d6f0638188"}
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.133549 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" event={"ID":"0e2e94ed-63a4-4335-8edd-67b592965119","Type":"ContainerStarted","Data":"03cf2d53761a63a68e7ef9982c1f2e785e3376247fac7670488ae85a1e27cbab"}
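[editor's note] Each failed volume operation above is re-queued with "No retries permitted until ... (durationBeforeRetry 500ms)": the kubelet's nestedpendingoperations layer tracks, per operation, the last failure time and a delay that starts at 500ms and grows exponentially on repeated failures. The following is a toy sketch of that gating pattern, illustrating the behavior visible in the log rather than reproducing the kubelet's actual code; the 2-minute cap is an assumption for the sketch.

// backoff sketches the per-operation retry gating seen in the
// "No retries permitted until ..." lines (kubelet's real logic lives in
// nestedpendingoperations.go).
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2 * time.Minute // assumed cap for this sketch
)

type expBackoff struct {
	lastError      time.Time
	durationBefore time.Duration // delay required before the next retry
}

// markError records a failure and grows the delay, starting at 500ms.
func (b *expBackoff) markError(now time.Time) {
	b.lastError = now
	if b.durationBefore == 0 {
		b.durationBefore = initialDelay
		return
	}
	b.durationBefore *= 2
	if b.durationBefore > maxDelay {
		b.durationBefore = maxDelay
	}
}

// retryAllowed reports whether enough time has passed since the last failure.
func (b *expBackoff) retryAllowed(now time.Time) bool {
	return now.Sub(b.lastError) >= b.durationBefore
}

func main() {
	var b expBackoff
	now := time.Now()
	b.markError(now)
	fmt.Printf("no retries permitted until %s (durationBeforeRetry %s), allowed now: %v\n",
		now.Add(b.durationBefore).Format(time.RFC3339Nano), b.durationBefore, b.retryAllowed(now))
}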
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" event={"ID":"0e2e94ed-63a4-4335-8edd-67b592965119","Type":"ContainerStarted","Data":"4e21d104ebdd61fbc4f6d16d3a75dfcce050dc70891574daeb00a8cc290d6d74"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.151848 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-z2tgs" event={"ID":"56bb7cee-bac4-4696-8d3d-c89a96d8de65","Type":"ContainerStarted","Data":"5d643b35c62d6d181d057a8812a5bd4229534a8c25db45bcbc237c8d9e626e68"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.156774 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.157403 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.657388932 +0000 UTC m=+148.595847750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.168354 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" event={"ID":"be25c0f8-af76-4675-88c5-2ef8f85c8b64","Type":"ContainerStarted","Data":"1de868462be8cd11357fe69ad8126ad1fccae5ebf95afaf620ea6a78ae3319e0"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.169135 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.179577 4603 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-5drd5 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.179621 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" podUID="be25c0f8-af76-4675-88c5-2ef8f85c8b64" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.180966 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j84b7" event={"ID":"cad15f61-9169-4a88-adb8-bd63071736d0","Type":"ContainerStarted","Data":"302d1ba364558b901ac3bf81de7eb1b8071bedd393c4d3b7495412fffd31f266"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.202939 4603 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7gjfv" event={"ID":"bd8dd34e-aa2d-4388-9d52-299033710686","Type":"ContainerStarted","Data":"9fbcb7b43ad6b48ffb14479c2bb8e5bf310b9a3c29eb49d7210f2ef5a26102dd"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.214185 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" event={"ID":"0828680f-a2ec-4519-9704-206f7b7feb35","Type":"ContainerStarted","Data":"a3d2b5b231120a8990fc7b16a14b44d6855a30004b0224a6e126776a87a51130"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.224346 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" event={"ID":"4db563e4-c2e0-43d9-bc1c-0961bca8cf09","Type":"ContainerStarted","Data":"037a89d66a349dee24f94b7d9e9e69664e188515820a458257236d6ba0437a15"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.240965 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" event={"ID":"a4a6e1f1-d082-4737-83b9-0dbaad5338c5","Type":"ContainerStarted","Data":"109a96ef68daa295fa153836b9ba1ee88d0b8ddcee8c4de1f305ccbac994e59a"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.243309 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" event={"ID":"1c61ac02-3732-4bf3-b488-ac09d35092f4","Type":"ContainerStarted","Data":"f2d83d1548d9b13256a5c16e8d9874ec8ade5c25dcd742a7e46dddac2ec50052"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.243332 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" event={"ID":"1c61ac02-3732-4bf3-b488-ac09d35092f4","Type":"ContainerStarted","Data":"d229bf44fa06ff52597776787b54fdf0ceea6a9a1e21738d1b12c8c51e203f11"} Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.254846 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4lz6k" podStartSLOduration=127.254823975 podStartE2EDuration="2m7.254823975s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.25288179 +0000 UTC m=+148.191340608" watchObservedRunningTime="2025-09-30 19:49:06.254823975 +0000 UTC m=+148.193282793" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.262143 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.268886 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.768869952 +0000 UTC m=+148.707328850 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.269109 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.345761 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-69dcb" podStartSLOduration=127.345747575 podStartE2EDuration="2m7.345747575s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.345493447 +0000 UTC m=+148.283952265" watchObservedRunningTime="2025-09-30 19:49:06.345747575 +0000 UTC m=+148.284206393" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.346033 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9nfqj" podStartSLOduration=126.346029203 podStartE2EDuration="2m6.346029203s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.294652441 +0000 UTC m=+148.233111259" watchObservedRunningTime="2025-09-30 19:49:06.346029203 +0000 UTC m=+148.284488021" Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.371702 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.377850 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.877836442 +0000 UTC m=+148.816295260 (durationBeforeRetry 500ms). 
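[editor's note] Every entry in this capture is a journald line ("Sep 30 19:49:06 crc kubenswrapper[4603]:") wrapping a klog header: a severity letter (I/W/E), MMDD, wall-clock time, PID, and source file:line, followed by the message. For post-processing a capture like this one, a small parsing sketch; the year is an assumption, since klog omits it, and the program is illustrative.

// klogparse splits the klog header (severity, MMDD timestamp, PID,
// file:line) from the message body of entries like the ones in this log.
package main

import (
	"fmt"
	"log"
	"regexp"
	"time"
)

// Matches e.g.: I0930 19:49:05.625220 4603 kubelet.go:2542] "SyncLoop (probe)" ...
var klogRe = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([^\]]+)\] (.*)$`)

func main() {
	line := `I0930 19:49:05.625220 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"`
	m := klogRe.FindStringSubmatch(line)
	if m == nil {
		log.Fatal("not a klog line")
	}
	// klog omits the year; assume 2025 from the journald prefix.
	ts, err := time.Parse("2006 0102 15:04:05.000000", "2025 "+m[2]+" "+m[3])
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("severity=%s time=%s pid=%s source=%s\nmsg=%s\n",
		m[1], ts.Format(time.RFC3339Nano), m[4], m[5], m[6])
}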
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.377850 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.877836442 +0000 UTC m=+148.816295260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.410453 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" podStartSLOduration=127.410436284 podStartE2EDuration="2m7.410436284s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.384453688 +0000 UTC m=+148.322912506" watchObservedRunningTime="2025-09-30 19:49:06.410436284 +0000 UTC m=+148.348895092"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.412726 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" podStartSLOduration=127.412718187 podStartE2EDuration="2m7.412718187s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.40855152 +0000 UTC m=+148.347010338" watchObservedRunningTime="2025-09-30 19:49:06.412718187 +0000 UTC m=+148.351177005"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.465839 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nfzcr" podStartSLOduration=126.465822858 podStartE2EDuration="2m6.465822858s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.464154931 +0000 UTC m=+148.402613749" watchObservedRunningTime="2025-09-30 19:49:06.465822858 +0000 UTC m=+148.404281676"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.473023 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.473432 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:06.973417293 +0000 UTC m=+148.911876111 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.574185 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.575028 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.075011594 +0000 UTC m=+149.013470402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.608028 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" podStartSLOduration=126.608015727 podStartE2EDuration="2m6.608015727s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.543902045 +0000 UTC m=+148.482360863" watchObservedRunningTime="2025-09-30 19:49:06.608015727 +0000 UTC m=+148.546474545"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.608110 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5" podStartSLOduration=126.608107199 podStartE2EDuration="2m6.608107199s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.606561366 +0000 UTC m=+148.545020184" watchObservedRunningTime="2025-09-30 19:49:06.608107199 +0000 UTC m=+148.546566017"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.638346 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:49:06 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld
Sep 30 19:49:06 crc kubenswrapper[4603]: [+]process-running ok
Sep 30 19:49:06 crc kubenswrapper[4603]: healthz check failed
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.638396 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.651688 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-7gjfv" podStartSLOduration=127.651673891 podStartE2EDuration="2m7.651673891s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:06.648539572 +0000 UTC m=+148.586998390" watchObservedRunningTime="2025-09-30 19:49:06.651673891 +0000 UTC m=+148.590132709"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.676440 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.676891 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.676940 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.676964 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.680586 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.180568327 +0000 UTC m=+149.119027145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.681335 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.688106 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.701826 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.720550 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.738791 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.779576 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.780015 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.780570 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.280555283 +0000 UTC m=+149.219014101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.796780 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.881116 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.881387 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.381375412 +0000 UTC m=+149.319834230 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:06 crc kubenswrapper[4603]: I0930 19:49:06.981613 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:06 crc kubenswrapper[4603]: E0930 19:49:06.981932 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.481917574 +0000 UTC m=+149.420376392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.013411 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.085083 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.085887 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.585873971 +0000 UTC m=+149.524332789 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.186306 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.186879 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.686863995 +0000 UTC m=+149.625322813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.257460 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" event={"ID":"37afc549-2a93-48cb-85d9-ad284888e2f5","Type":"ContainerStarted","Data":"eac00c1d96ae195f89ffeebba5fb2be6c2c0346eeff82449ea707c66b57e0c92"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.280370 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" event={"ID":"90a9f0f4-2f8d-4ce9-98fd-db23877e381f","Type":"ContainerStarted","Data":"6a93554cab348d3d9f89e9c1800533e04595a89a321746263f9fc8ea73540635"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.280427 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" event={"ID":"90a9f0f4-2f8d-4ce9-98fd-db23877e381f","Type":"ContainerStarted","Data":"87d9f55abbe5bb00b2bf81e5939413450a173d8ac12f098bae75fee63e69821c"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.283976 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" event={"ID":"3406e452-0d64-4882-bbcf-46486cbbb1d1","Type":"ContainerStarted","Data":"486c313fa9628dd5d3989d68c18207f98a87e1cacda50662ba9a6eb726f8f0a6"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.284034 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" event={"ID":"3406e452-0d64-4882-bbcf-46486cbbb1d1","Type":"ContainerStarted","Data":"6ad207dc74133e6f9ab032d7649089c1563f46b2628ec743ae9e80dbe3aae091"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.288778 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.289050 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.789039872 +0000 UTC m=+149.727498690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.302291 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" event={"ID":"0828680f-a2ec-4519-9704-206f7b7feb35","Type":"ContainerStarted","Data":"b70995ea66c308aaeda9fe7c42def25e9ca89220bca6a9df93644ee519be8635"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.302335 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6x6gq" event={"ID":"0828680f-a2ec-4519-9704-206f7b7feb35","Type":"ContainerStarted","Data":"bfe1097f214a706b3620f9376b1fbc1043999f80a005e3679bde5944898990d5"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.307069 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" podStartSLOduration=128.307057042 podStartE2EDuration="2m8.307057042s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.305092976 +0000 UTC m=+149.243551794" watchObservedRunningTime="2025-09-30 19:49:07.307057042 +0000 UTC m=+149.245515860"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.314335 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" event={"ID":"59f7faf2-57d2-4230-b6a8-88c30096e372","Type":"ContainerStarted","Data":"cef4bb6b587a46c792957c2aead430af542f05eeddc5d9f3298ad4e7321b4708"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.318206 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" event={"ID":"007744a9-1794-4ff2-ba1d-f7c6d794f987","Type":"ContainerStarted","Data":"41730e50f494a71e900af4853df8e4847ccd8310b04a55906179d2d525ad6f81"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.320595 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2ctlk" event={"ID":"1c61ac02-3732-4bf3-b488-ac09d35092f4","Type":"ContainerStarted","Data":"423755c3aaed1fef5a03d12432ddc2863c6e410c632bd6cd54a44e5a8a91bfca"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.327830 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2tk5z" podStartSLOduration=127.327816878 podStartE2EDuration="2m7.327816878s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.327731206 +0000 UTC m=+149.266190024" watchObservedRunningTime="2025-09-30 19:49:07.327816878 +0000 UTC m=+149.266275696"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.335598 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-89sx8" event={"ID":"50ec5665-fef7-43c9-9143-29200698525d","Type":"ContainerStarted","Data":"4823ecfeb3579c1aa416f593ecf00b9df01b657ac9ca4173d6df269bf2dca319"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.350986 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" podStartSLOduration=127.350968082 podStartE2EDuration="2m7.350968082s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.350433307 +0000 UTC m=+149.288892125" watchObservedRunningTime="2025-09-30 19:49:07.350968082 +0000 UTC m=+149.289426900"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.352313 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8s2lm" event={"ID":"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2","Type":"ContainerStarted","Data":"d5289591bc82d02324e3bfefe6201be65f1aa9ca2320d97cda7e0e3920572e5a"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.352372 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-8s2lm" event={"ID":"61e28abf-6deb-4c8b-be7e-9a1d226ebdc2","Type":"ContainerStarted","Data":"3455436f97904af96c0cfe9940637400bc6a9bf6abc033b245b888f764f159a0"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.352922 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-8s2lm"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.359139 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" event={"ID":"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6","Type":"ContainerStarted","Data":"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.359491 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pq477"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.361728 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" event={"ID":"e0d835d7-9572-48dd-b237-cfa225e29d88","Type":"ContainerStarted","Data":"81db7e9b4bb870721d096dd5295fa0d5529d9c0e7763e6555b0bc80c99858d60"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.363574 4603 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pq477 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.363617 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.373671 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nk79f" podStartSLOduration=127.373655554 podStartE2EDuration="2m7.373655554s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.372291265 +0000 UTC m=+149.310750083" watchObservedRunningTime="2025-09-30 19:49:07.373655554 +0000 UTC m=+149.312114372"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.378866 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" event={"ID":"7c0c5692-7618-4522-9a4d-e10d7027c791","Type":"ContainerStarted","Data":"d55fba74b1d30baf3a14fe8ca68e246264d3be1c752983a6abfc6f1614f1133d"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.378904 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" event={"ID":"7c0c5692-7618-4522-9a4d-e10d7027c791","Type":"ContainerStarted","Data":"1965e7a247b1b37c4bb1604439fa4dd72940377b275b056afabc7782a550979e"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.378916 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.380204 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7gjfv" event={"ID":"bd8dd34e-aa2d-4388-9d52-299033710686","Type":"ContainerStarted","Data":"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.387563 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" event={"ID":"79714c45-e39c-431c-b45a-eb244926ced5","Type":"ContainerStarted","Data":"0b0f59566fd66caec6cda0ec3d62d3213e308750bfc5d4464818a9b3d75a4642"}
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.389176 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.389220 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.389254 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
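[editor's note] The "Observed pod startup duration" lines above record two durations: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts tracked image-pull time. Here firstStartedPulling and lastFinishedPulling are zero values (no pull was tracked), so the two durations coincide. A sketch of the arithmetic using timestamps copied from the kube-apiserver-operator entry above; the program itself is illustrative.

// sloduration reproduces the startup-duration arithmetic behind the
// pod_startup_latency_tracker lines, under the assumption of zero pull time.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2025-09-30 19:47:00 +0000 UTC")
	running, _ := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", "2025-09-30 19:49:07.372291265 +0000 UTC")

	var pullTime time.Duration // firstStartedPulling/lastFinishedPulling are zero values here
	e2e := running.Sub(created)
	slo := e2e - pullTime
	fmt.Printf("podStartSLOduration=%s podStartE2EDuration=%s\n", slo, e2e)
	// Prints roughly 2m7.37s, within a few milliseconds of the logged
	// 2m7.373655554s, which the tracker computes from its own clock.
}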
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.391240 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.89122494 +0000 UTC m=+149.829683758 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.391309 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.394379 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:07.894362279 +0000 UTC m=+149.832821097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.403898 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.409904 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5drd5"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.442437 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n2c2r"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.454749 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" podStartSLOduration=127.454733285 podStartE2EDuration="2m7.454733285s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.424409919 +0000 UTC m=+149.362868737" watchObservedRunningTime="2025-09-30 19:49:07.454733285 +0000 UTC m=+149.393192103"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.489437 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-p72j4" podStartSLOduration=127.489422405 podStartE2EDuration="2m7.489422405s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.487532922 +0000 UTC m=+149.425991740" watchObservedRunningTime="2025-09-30 19:49:07.489422405 +0000 UTC m=+149.427881223"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.489722 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-8s2lm" podStartSLOduration=9.489718384 podStartE2EDuration="9.489718384s" podCreationTimestamp="2025-09-30 19:48:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.455511467 +0000 UTC m=+149.393970285" watchObservedRunningTime="2025-09-30 19:49:07.489718384 +0000 UTC m=+149.428177202"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.500557 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.502085 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.002070812 +0000 UTC m=+149.940529630 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.564819 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-jv7tl" podStartSLOduration=128.564796516 podStartE2EDuration="2m8.564796516s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.517766306 +0000 UTC m=+149.456225124" watchObservedRunningTime="2025-09-30 19:49:07.564796516 +0000 UTC m=+149.503255334"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.565859 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" podStartSLOduration=127.565853285 podStartE2EDuration="2m7.565853285s" podCreationTimestamp="2025-09-30 19:47:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:07.564202499 +0000 UTC m=+149.502661327" watchObservedRunningTime="2025-09-30 19:49:07.565853285 +0000 UTC m=+149.504312103"
Sep 30 19:49:07 crc kubenswrapper[4603]: W0930 19:49:07.588638 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-7b822b2ed589817a1dda1ab4fbd5c77078df8ff520e44315b3cc51a44f1e0950 WatchSource:0}: Error finding container 7b822b2ed589817a1dda1ab4fbd5c77078df8ff520e44315b3cc51a44f1e0950: Status 404 returned error can't find the container with id 7b822b2ed589817a1dda1ab4fbd5c77078df8ff520e44315b3cc51a44f1e0950
Sep 30 19:49:07 crc
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.602316 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.602605 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.102594523 +0000 UTC m=+150.041053331 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.632693 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:49:07 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld
Sep 30 19:49:07 crc kubenswrapper[4603]: [+]process-running ok
Sep 30 19:49:07 crc kubenswrapper[4603]: healthz check failed
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.632756 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.705634 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.705955 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.205941194 +0000 UTC m=+150.144400002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.813303 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.813607 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.313596207 +0000 UTC m=+150.252055025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:07 crc kubenswrapper[4603]: W0930 19:49:07.892752 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-49f126f1afdc0955215a3a03a6beb6541d33e7ec2a15f27e65553b81b8f4a6d0 WatchSource:0}: Error finding container 49f126f1afdc0955215a3a03a6beb6541d33e7ec2a15f27e65553b81b8f4a6d0: Status 404 returned error can't find the container with id 49f126f1afdc0955215a3a03a6beb6541d33e7ec2a15f27e65553b81b8f4a6d0
Sep 30 19:49:07 crc kubenswrapper[4603]: I0930 19:49:07.914276 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:07 crc kubenswrapper[4603]: E0930 19:49:07.914619 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.414602181 +0000 UTC m=+150.353060999 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.028022 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.028365 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.528350745 +0000 UTC m=+150.466809563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.129448 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.129811 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.629798082 +0000 UTC m=+150.568256900 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.232124 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.232552 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.732535446 +0000 UTC m=+150.670994264 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.333302 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.333462 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.833439717 +0000 UTC m=+150.771898535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.333863 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.334172 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.834146357 +0000 UTC m=+150.772605165 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.388414 4603 patch_prober.go:28] interesting pod/console-operator-58897d9998-cb8bv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.388485 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" podUID="ce0e78ed-af1c-4419-b6a2-bb1ba7f54afd" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.388667 4603 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fbk8r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.388731 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r" podUID="05a079f5-4049-4140-af22-5271a97fee7a" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.420008 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ee7ddbc3c5d08ce7a7734a4b2f7a8a10209c30ebf586b6a16911effb289059cb"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.420052 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1413ebbcde377410d5c93400de5ddaf3739e42c7a1b5b1b8059687f263a8998d"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.420299 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.427596 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" event={"ID":"37afc549-2a93-48cb-85d9-ad284888e2f5","Type":"ContainerStarted","Data":"8891fc9a7bb6065af99a0e07faf97a84d47f6bead26a59353e73bccd27614782"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.431037 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"88c77ca8d2f5b4b0a2520621cf7436a3976b21ad05b5e0c204ee25da3edc2c07"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.431133 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"49f126f1afdc0955215a3a03a6beb6541d33e7ec2a15f27e65553b81b8f4a6d0"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.434467 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.434788 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:08.934774621 +0000 UTC m=+150.873233439 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.441270 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.441308 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.451456 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0b6673eb46873482ef21faad9bab3ea85e51043ea6b4fe9aca0c073f07781db1"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.451493 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7b822b2ed589817a1dda1ab4fbd5c77078df8ff520e44315b3cc51a44f1e0950"}
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.452999 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.453031 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.457180 4603 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pq477 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.457222 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.536609 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.539414 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.039399338 +0000 UTC m=+150.977858156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.638001 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.638314 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.138289382 +0000 UTC m=+151.076748200 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.638369 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.638735 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.138718694 +0000 UTC m=+151.077177512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.643732 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:49:08 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld
Sep 30 19:49:08 crc kubenswrapper[4603]: [+]process-running ok
Sep 30 19:49:08 crc kubenswrapper[4603]: healthz check failed
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.643812 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.739455 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.739688 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.239631116 +0000 UTC m=+151.178089934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.739756 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.740065 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.240055368 +0000 UTC m=+151.178514186 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.840268 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.840451 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.340424975 +0000 UTC m=+151.278883803 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.840863 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.841195 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.341187806 +0000 UTC m=+151.279646624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.941665 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.941843 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.441803679 +0000 UTC m=+151.380262497 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:08 crc kubenswrapper[4603]: I0930 19:49:08.941945 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:08 crc kubenswrapper[4603]: E0930 19:49:08.942313 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.442289813 +0000 UTC m=+151.380748631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.043123 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.043311 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.543281788 +0000 UTC m=+151.481740596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.043425 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.043698 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.543686879 +0000 UTC m=+151.482145697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.146583 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.146924 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.646909896 +0000 UTC m=+151.585368704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.210904 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.211805 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.214582 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.247789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.248066 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.748055025 +0000 UTC m=+151.686513843 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.296012 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.331381 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fbk8r"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.349010 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.349311 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh6wj\" (UniqueName: \"kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.349358 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.349391 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.349542 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.849523962 +0000 UTC m=+151.787982780 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.402783 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9k5ct"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.403634 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.414567 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.431584 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9k5ct"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.450895 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.450986 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.451034 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh6wj\" (UniqueName: \"kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.451056 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.451839 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.451890 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.452197 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:09.952184033 +0000 UTC m=+151.890642851 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.457601 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" event={"ID":"37afc549-2a93-48cb-85d9-ad284888e2f5","Type":"ContainerStarted","Data":"2ffc18d7de4b4e799525a74e5fb51d44d0781494da6042e347a2802d67307205"}
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.507969 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh6wj\" (UniqueName: \"kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj\") pod \"certified-operators-8rxlc\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.525555 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8rxlc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.545675 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.546251 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.551964 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.552320 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.552372 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbtk8\" (UniqueName: \"kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.552390 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.552473 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.052458647 +0000 UTC m=+151.990917465 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.560402 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.560606 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.568210 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.633144 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 19:49:09 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld
Sep 30 19:49:09 crc kubenswrapper[4603]: [+]process-running ok
Sep 30 19:49:09 crc kubenswrapper[4603]: healthz check failed
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.633211 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653667 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653702 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653747 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653781 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653796 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbtk8\" (UniqueName: \"kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.653819 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.654064 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.154053108 +0000 UTC m=+152.092511926 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.654414 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.654626 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.659747 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.665616 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.707723 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbtk8\" (UniqueName: \"kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8\") pod \"community-operators-9k5ct\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.721115 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.725406 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9k5ct"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754358 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754492 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62f6j\" (UniqueName: \"kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754522 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754585 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754616 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754634 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.754685 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.754746 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.254733863 +0000 UTC m=+152.193192681 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.808681 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.820261 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mhq5r"]
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.821145 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhq5r"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.855803 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.855873 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.855895 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.855931 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62f6j\" (UniqueName: \"kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.856461 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc"
Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.856679 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed.
No retries permitted until 2025-09-30 19:49:10.356669004 +0000 UTC m=+152.295127822 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.856985 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.892248 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mhq5r"] Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.903281 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62f6j\" (UniqueName: \"kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j\") pod \"certified-operators-mx7fc\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.905744 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.956587 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.956765 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slfv6\" (UniqueName: \"kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.956809 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.956834 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:09 crc kubenswrapper[4603]: E0930 19:49:09.956951 4603 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.456936328 +0000 UTC m=+152.395395136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:09 crc kubenswrapper[4603]: I0930 19:49:09.980491 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.058792 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.059060 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.059099 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.059138 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slfv6\" (UniqueName: \"kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.059941 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.065286 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.066440 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.566418572 +0000 UTC m=+152.504877470 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.107643 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slfv6\" (UniqueName: \"kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6\") pod \"community-operators-mhq5r\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.156132 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.160655 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.161009 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.660990353 +0000 UTC m=+152.599449171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.264580 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.264859 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.764847089 +0000 UTC m=+152.703305907 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.365947 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.366267 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.866254255 +0000 UTC m=+152.804713063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.371391 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"] Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.461628 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerStarted","Data":"9179dd4849629cd99256174305419e555545d357463fad99bcd0b74e56db2cf9"} Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.469461 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.469702 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:10.969690568 +0000 UTC m=+152.908149386 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.478307 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" event={"ID":"37afc549-2a93-48cb-85d9-ad284888e2f5","Type":"ContainerStarted","Data":"d6ab8ccbcaf6f6d2906644cdecfb7c36d3065656e9d47f53010c7df7ff54d3e7"} Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.505059 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5lmct" podStartSLOduration=12.505045077 podStartE2EDuration="12.505045077s" podCreationTimestamp="2025-09-30 19:48:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:10.504852581 +0000 UTC m=+152.443311399" watchObservedRunningTime="2025-09-30 19:49:10.505045077 +0000 UTC m=+152.443503895" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.570763 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.570989 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.07096466 +0000 UTC m=+153.009423478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.571240 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.572270 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.072262776 +0000 UTC m=+153.010721594 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.639337 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:10 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:10 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:10 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.639386 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.673021 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.673313 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.173299762 +0000 UTC m=+153.111758580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.754152 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9k5ct"] Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.773957 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.774387 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.274375428 +0000 UTC m=+153.212834246 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.820510 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.837373 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"] Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.874543 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.875015 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.375000072 +0000 UTC m=+153.313458890 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:10 crc kubenswrapper[4603]: I0930 19:49:10.984823 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:10 crc kubenswrapper[4603]: E0930 19:49:10.985153 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.485141945 +0000 UTC m=+153.423600763 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.007186 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mhq5r"] Sep 30 19:49:11 crc kubenswrapper[4603]: W0930 19:49:11.032379 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93ba0bd6_e15b_47fb_8168_c77b4c4b4bb8.slice/crio-5c51764a324019cf3b902cff626b77f844bfdf4bb3095f870da897ca90e1fdc7 WatchSource:0}: Error finding container 5c51764a324019cf3b902cff626b77f844bfdf4bb3095f870da897ca90e1fdc7: Status 404 returned error can't find the container with id 5c51764a324019cf3b902cff626b77f844bfdf4bb3095f870da897ca90e1fdc7 Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.085591 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.085964 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.585948344 +0000 UTC m=+153.524407162 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.186873 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.187411 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.68739986 +0000 UTC m=+153.625858678 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.194180 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.194590 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.223013 4603 patch_prober.go:28] interesting pod/apiserver-76f77b778f-8jxkn container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]log ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]etcd ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/generic-apiserver-start-informers ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/max-in-flight-filter ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/image.openshift.io-apiserver-caches ok Sep 30 19:49:11 crc kubenswrapper[4603]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Sep 30 19:49:11 crc kubenswrapper[4603]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/project.openshift.io-projectcache ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-startinformers ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/openshift.io-restmapperupdater ok Sep 30 19:49:11 crc kubenswrapper[4603]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 30 19:49:11 crc kubenswrapper[4603]: livez check failed Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.223071 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" podUID="90a9f0f4-2f8d-4ce9-98fd-db23877e381f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.288427 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.288617 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:11.788569079 +0000 UTC m=+153.727027897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.288772 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.289499 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.789489255 +0000 UTC m=+153.727948073 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.293829 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-cb8bv" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.344856 4603 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.353578 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.356079 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.364420 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.392513 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.392539 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 19:49:11.892523807 +0000 UTC m=+153.830982625 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.393140 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.393808 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:11.893800854 +0000 UTC m=+153.832259672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.395450 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"] Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.396900 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.399201 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.420928 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"] Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.484341 4603 generic.go:334] "Generic (PLEG): container finished" podID="436ba6f1-35f4-4952-b793-c73d9585c715" containerID="5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532" exitCode=0 Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.484409 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerDied","Data":"5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.484433 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerStarted","Data":"eed7ec8c53e747bb8465b348b1c96c9971f2983b61582fdfc85739d9997e34a5"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.485640 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4","Type":"ContainerStarted","Data":"968a12abfaf5d7f2b71e624043f9145b453a7f7ffcf75bf1e6c0ae87c4b51edd"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.485659 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4","Type":"ContainerStarted","Data":"152f856fc595c3aa5176fb4abc2f432e734d6802a9dfe2d0a53b5c673f94c45b"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.486480 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.488042 4603 generic.go:334] "Generic (PLEG): container finished" podID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerID="bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e" exitCode=0 Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.488081 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerDied","Data":"bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.488096 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerStarted","Data":"23f419e335b88b9115e9aa9fa678833c1e9b92eb897dbaa7ba0a93bd89b42396"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.490282 4603 generic.go:334] "Generic (PLEG): container finished" podID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerID="9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b" exitCode=0 Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.490361 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" 
event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerDied","Data":"9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.490387 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerStarted","Data":"5c51764a324019cf3b902cff626b77f844bfdf4bb3095f870da897ca90e1fdc7"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.508635 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.509023 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.008997589 +0000 UTC m=+153.947456407 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.509132 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.509282 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.509442 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9wfl\" (UniqueName: \"kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.509473 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.510677 4603 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.010663785 +0000 UTC m=+153.949122604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.513490 4603 generic.go:334] "Generic (PLEG): container finished" podID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerID="9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738" exitCode=0 Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.513813 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerDied","Data":"9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738"} Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.526229 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bp267" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.576890 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.576869147 podStartE2EDuration="2.576869147s" podCreationTimestamp="2025-09-30 19:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:11.574345005 +0000 UTC m=+153.512803833" watchObservedRunningTime="2025-09-30 19:49:11.576869147 +0000 UTC m=+153.515327965" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.610211 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.610384 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.110352673 +0000 UTC m=+154.048811491 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.610481 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9wfl\" (UniqueName: \"kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.610538 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.611456 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.611766 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.611925 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.613212 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.613509 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.113493852 +0000 UTC m=+154.051952670 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.626146 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.633521 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:11 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:11 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:11 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.633581 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.638409 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9wfl\" (UniqueName: \"kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl\") pod \"redhat-marketplace-bsmjt\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.711264 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.715128 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.715273 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.215251977 +0000 UTC m=+154.153710795 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.715400 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.716471 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.216462732 +0000 UTC m=+154.154921550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.750947 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.751020 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.751309 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.751427 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.782941 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.783880 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.794822 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.818945 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.819188 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.319149873 +0000 UTC m=+154.257608691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.819319 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.819617 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.319606007 +0000 UTC m=+154.258064825 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.922046 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.922306 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.422281388 +0000 UTC m=+154.360740206 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.922539 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.922565 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.922588 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bblz\" (UniqueName: \"kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.922633 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:49:11 crc kubenswrapper[4603]: E0930 19:49:11.922945 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 19:49:12.422932656 +0000 UTC m=+154.361391474 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-jrnm6" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.937928 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pq477"
Sep 30 19:49:11 crc kubenswrapper[4603]: I0930 19:49:11.947035 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"]
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.001292 4603 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T19:49:11.344882531Z","Handler":null,"Name":""}
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.003799 4603 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.003838 4603 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.023016 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.023417 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.023454 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bblz\" (UniqueName: \"kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.023514 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.024122 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.024268 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.029733 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.048044 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bblz\" (UniqueName: \"kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz\") pod \"redhat-marketplace-l582d\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.077205 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-7gjfv"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.077250 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-7gjfv"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.078676 4603 patch_prober.go:28] interesting pod/console-f9d7485db-7gjfv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.078731 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7gjfv" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.099373 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l582d"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.125613 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.144803 4603 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.144840 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.180563 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-jrnm6\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.384830 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"]
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.386414 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v27rc"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.389826 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.405977 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"]
Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.473699 4603 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.497628 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:49:12 crc kubenswrapper[4603]: W0930 19:49:12.517880 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b72c703_50e4_40a8_8d48_f44937ac8f4a.slice/crio-a117220a1e451736d76965a047a8d184c0b10043509d76d56cef4dc03b1cd8fe WatchSource:0}: Error finding container a117220a1e451736d76965a047a8d184c0b10043509d76d56cef4dc03b1cd8fe: Status 404 returned error can't find the container with id a117220a1e451736d76965a047a8d184c0b10043509d76d56cef4dc03b1cd8fe Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.538564 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.538696 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb9pw\" (UniqueName: \"kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.538759 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.575425 4603 generic.go:334] "Generic (PLEG): container finished" podID="7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" containerID="968a12abfaf5d7f2b71e624043f9145b453a7f7ffcf75bf1e6c0ae87c4b51edd" exitCode=0 Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.576566 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4","Type":"ContainerDied","Data":"968a12abfaf5d7f2b71e624043f9145b453a7f7ffcf75bf1e6c0ae87c4b51edd"} Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.611560 4603 generic.go:334] "Generic (PLEG): container finished" podID="4db563e4-c2e0-43d9-bc1c-0961bca8cf09" containerID="037a89d66a349dee24f94b7d9e9e69664e188515820a458257236d6ba0437a15" exitCode=0 Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.611633 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" event={"ID":"4db563e4-c2e0-43d9-bc1c-0961bca8cf09","Type":"ContainerDied","Data":"037a89d66a349dee24f94b7d9e9e69664e188515820a458257236d6ba0437a15"} Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.625025 4603 generic.go:334] "Generic (PLEG): container finished" podID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerID="7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50" exitCode=0 Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.626276 4603 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerDied","Data":"7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50"} Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.626324 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerStarted","Data":"c80c40dee1a82a0253b1c4c4536f102ee3c25a1bc194df9ec3c6e308c25f0c47"} Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.629988 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:12 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:12 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:12 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.630052 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.642880 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb9pw\" (UniqueName: \"kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.642952 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.642997 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.644813 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.645328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.671943 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb9pw\" (UniqueName: 
\"kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw\") pod \"redhat-operators-v27rc\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.722765 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.775731 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.789776 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.790765 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.806556 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.949216 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bh62\" (UniqueName: \"kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.949487 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.949540 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:12 crc kubenswrapper[4603]: I0930 19:49:12.964063 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"] Sep 30 19:49:12 crc kubenswrapper[4603]: W0930 19:49:12.973689 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4af16ef_bdd1_4804_a11c_d9eda6ed782a.slice/crio-827a9154a4edee962a92944deb25ec0785e87d8132d8d0ccecee27f4f542208c WatchSource:0}: Error finding container 827a9154a4edee962a92944deb25ec0785e87d8132d8d0ccecee27f4f542208c: Status 404 returned error can't find the container with id 827a9154a4edee962a92944deb25ec0785e87d8132d8d0ccecee27f4f542208c Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.050645 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bh62\" (UniqueName: \"kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 
19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.050716 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.050762 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.051125 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.051618 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.074665 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bh62\" (UniqueName: \"kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62\") pod \"redhat-operators-xpm65\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.132190 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.407961 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"] Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.547086 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:49:13 crc kubenswrapper[4603]: W0930 19:49:13.605549 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b36b6e6_0ac7_4444_82a8_9bfc58fd604a.slice/crio-3168d25d87d2665c914619172849aa22d599aa31061a044443f9068841b7a7b6 WatchSource:0}: Error finding container 3168d25d87d2665c914619172849aa22d599aa31061a044443f9068841b7a7b6: Status 404 returned error can't find the container with id 3168d25d87d2665c914619172849aa22d599aa31061a044443f9068841b7a7b6 Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.631371 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:13 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:13 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:13 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.631426 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.692652 4603 generic.go:334] "Generic (PLEG): container finished" podID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerID="8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1" exitCode=0 Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.692725 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerDied","Data":"8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1"} Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.693037 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerStarted","Data":"a117220a1e451736d76965a047a8d184c0b10043509d76d56cef4dc03b1cd8fe"} Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.716651 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerStarted","Data":"3168d25d87d2665c914619172849aa22d599aa31061a044443f9068841b7a7b6"} Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.719200 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" event={"ID":"a4af16ef-bdd1-4804-a11c-d9eda6ed782a","Type":"ContainerStarted","Data":"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578"} Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.719224 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" 
event={"ID":"a4af16ef-bdd1-4804-a11c-d9eda6ed782a","Type":"ContainerStarted","Data":"827a9154a4edee962a92944deb25ec0785e87d8132d8d0ccecee27f4f542208c"} Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.719722 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:13 crc kubenswrapper[4603]: I0930 19:49:13.734652 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerStarted","Data":"b5ff32a7d478eaf03b263d844499f993b5655fc7832bfaa4950c3600040f5e56"} Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.184702 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.203593 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" podStartSLOduration=135.203574188 podStartE2EDuration="2m15.203574188s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:13.792990774 +0000 UTC m=+155.731449592" watchObservedRunningTime="2025-09-30 19:49:14.203574188 +0000 UTC m=+156.142033006" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.254056 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.277039 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir\") pod \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.277150 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access\") pod \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\" (UID: \"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4\") " Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.277396 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" (UID: "7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.277924 4603 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.289486 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" (UID: "7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.379230 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsfgt\" (UniqueName: \"kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt\") pod \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.379580 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume\") pod \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.379623 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume\") pod \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\" (UID: \"4db563e4-c2e0-43d9-bc1c-0961bca8cf09\") " Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.380020 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.380202 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume" (OuterVolumeSpecName: "config-volume") pod "4db563e4-c2e0-43d9-bc1c-0961bca8cf09" (UID: "4db563e4-c2e0-43d9-bc1c-0961bca8cf09"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.391211 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt" (OuterVolumeSpecName: "kube-api-access-dsfgt") pod "4db563e4-c2e0-43d9-bc1c-0961bca8cf09" (UID: "4db563e4-c2e0-43d9-bc1c-0961bca8cf09"). InnerVolumeSpecName "kube-api-access-dsfgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.392193 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4db563e4-c2e0-43d9-bc1c-0961bca8cf09" (UID: "4db563e4-c2e0-43d9-bc1c-0961bca8cf09"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.481250 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsfgt\" (UniqueName: \"kubernetes.io/projected/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-kube-api-access-dsfgt\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.481287 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.481296 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4db563e4-c2e0-43d9-bc1c-0961bca8cf09-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.635900 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:14 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:14 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:14 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.635948 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.758931 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.758942 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j" event={"ID":"4db563e4-c2e0-43d9-bc1c-0961bca8cf09","Type":"ContainerDied","Data":"83e4f8409ff19461e303b8c6d634a6c07163d3fb325e97d691d4332d69b690d0"} Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.759364 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83e4f8409ff19461e303b8c6d634a6c07163d3fb325e97d691d4332d69b690d0" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.772178 4603 generic.go:334] "Generic (PLEG): container finished" podID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerID="ba6f1fa3384c745ca09695b40ed829fe1167e5fd587062b751138fa33ec7061a" exitCode=0 Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.777481 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerDied","Data":"ba6f1fa3384c745ca09695b40ed829fe1167e5fd587062b751138fa33ec7061a"} Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.777835 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.812269 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4","Type":"ContainerDied","Data":"152f856fc595c3aa5176fb4abc2f432e734d6802a9dfe2d0a53b5c673f94c45b"} Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.812351 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="152f856fc595c3aa5176fb4abc2f432e734d6802a9dfe2d0a53b5c673f94c45b" Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.821762 4603 generic.go:334] "Generic (PLEG): container finished" podID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerID="7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d" exitCode=0 Sep 30 19:49:14 crc kubenswrapper[4603]: I0930 19:49:14.822474 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerDied","Data":"7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d"} Sep 30 19:49:15 crc kubenswrapper[4603]: I0930 19:49:15.629469 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:15 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:15 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:15 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:15 crc kubenswrapper[4603]: I0930 19:49:15.629523 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.114851 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:49:16 crc kubenswrapper[4603]: E0930 19:49:16.115289 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" containerName="pruner" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.115299 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" containerName="pruner" Sep 30 19:49:16 crc kubenswrapper[4603]: E0930 19:49:16.115318 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db563e4-c2e0-43d9-bc1c-0961bca8cf09" containerName="collect-profiles" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.115324 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db563e4-c2e0-43d9-bc1c-0961bca8cf09" containerName="collect-profiles" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.115419 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4db563e4-c2e0-43d9-bc1c-0961bca8cf09" containerName="collect-profiles" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.115436 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c8634ba-a2fa-4e2c-bd5a-1dbf513125f4" containerName="pruner" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.115784 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.118712 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.120318 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.128878 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.198234 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.203439 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8jxkn" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.263922 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.263977 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.365253 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.365300 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.366823 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.389512 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.433908 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.630073 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:16 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:16 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:16 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.630550 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:16 crc kubenswrapper[4603]: I0930 19:49:16.989497 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-8s2lm" Sep 30 19:49:17 crc kubenswrapper[4603]: I0930 19:49:17.005430 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 19:49:17 crc kubenswrapper[4603]: I0930 19:49:17.582429 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:49:17 crc kubenswrapper[4603]: I0930 19:49:17.643461 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:17 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:17 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:17 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:17 crc kubenswrapper[4603]: I0930 19:49:17.643534 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:17 crc kubenswrapper[4603]: I0930 19:49:17.844405 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"277e6095-4a7c-4355-a944-3f7179b8f06e","Type":"ContainerStarted","Data":"77a8eefee8a638723755adefbfd097fff4267eb14f7dbc838d8d19ee8fc1ea0e"} Sep 30 19:49:18 crc kubenswrapper[4603]: I0930 19:49:18.629403 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:18 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:18 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:18 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:18 crc kubenswrapper[4603]: I0930 19:49:18.629645 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:18 crc kubenswrapper[4603]: I0930 19:49:18.878863 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"277e6095-4a7c-4355-a944-3f7179b8f06e","Type":"ContainerStarted","Data":"f327224433f0d31c5662e1f0957030490a7802bc2dbc46a5eb1d7adc4c5adbfa"} Sep 30 19:49:18 crc kubenswrapper[4603]: I0930 19:49:18.893412 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.893397682 podStartE2EDuration="2.893397682s" podCreationTimestamp="2025-09-30 19:49:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:18.891277672 +0000 UTC m=+160.829736500" watchObservedRunningTime="2025-09-30 19:49:18.893397682 +0000 UTC m=+160.831856500" Sep 30 19:49:19 crc kubenswrapper[4603]: I0930 19:49:19.629744 4603 patch_prober.go:28] interesting pod/router-default-5444994796-hswfh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 19:49:19 crc kubenswrapper[4603]: [-]has-synced failed: reason withheld Sep 30 19:49:19 crc kubenswrapper[4603]: [+]process-running ok Sep 30 19:49:19 crc kubenswrapper[4603]: healthz check failed Sep 30 19:49:19 crc kubenswrapper[4603]: I0930 19:49:19.629790 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hswfh" podUID="2ab2de99-0003-4d85-8cb8-fe347801f9d1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 19:49:19 crc kubenswrapper[4603]: I0930 19:49:19.895020 4603 generic.go:334] "Generic (PLEG): container finished" podID="277e6095-4a7c-4355-a944-3f7179b8f06e" containerID="f327224433f0d31c5662e1f0957030490a7802bc2dbc46a5eb1d7adc4c5adbfa" exitCode=0 Sep 30 19:49:19 crc kubenswrapper[4603]: I0930 19:49:19.895069 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"277e6095-4a7c-4355-a944-3f7179b8f06e","Type":"ContainerDied","Data":"f327224433f0d31c5662e1f0957030490a7802bc2dbc46a5eb1d7adc4c5adbfa"} Sep 30 19:49:20 crc kubenswrapper[4603]: I0930 19:49:20.629284 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:20 crc kubenswrapper[4603]: I0930 19:49:20.632986 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-hswfh" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.349181 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.476408 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir\") pod \"277e6095-4a7c-4355-a944-3f7179b8f06e\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.476511 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access\") pod \"277e6095-4a7c-4355-a944-3f7179b8f06e\" (UID: \"277e6095-4a7c-4355-a944-3f7179b8f06e\") " Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.476543 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "277e6095-4a7c-4355-a944-3f7179b8f06e" (UID: "277e6095-4a7c-4355-a944-3f7179b8f06e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.477200 4603 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/277e6095-4a7c-4355-a944-3f7179b8f06e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.481974 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "277e6095-4a7c-4355-a944-3f7179b8f06e" (UID: "277e6095-4a7c-4355-a944-3f7179b8f06e"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.577770 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/277e6095-4a7c-4355-a944-3f7179b8f06e-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.751039 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.751484 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.751097 4603 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d2ld container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.751558 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2d2ld" podUID="722a6d6d-3382-415f-828c-db2fa023bbff" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.924072 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"277e6095-4a7c-4355-a944-3f7179b8f06e","Type":"ContainerDied","Data":"77a8eefee8a638723755adefbfd097fff4267eb14f7dbc838d8d19ee8fc1ea0e"} Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.924109 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77a8eefee8a638723755adefbfd097fff4267eb14f7dbc838d8d19ee8fc1ea0e" Sep 30 19:49:21 crc kubenswrapper[4603]: I0930 19:49:21.924362 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 19:49:22 crc kubenswrapper[4603]: I0930 19:49:22.078615 4603 patch_prober.go:28] interesting pod/console-f9d7485db-7gjfv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Sep 30 19:49:22 crc kubenswrapper[4603]: I0930 19:49:22.078667 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7gjfv" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Sep 30 19:49:22 crc kubenswrapper[4603]: I0930 19:49:22.998061 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:49:23 crc kubenswrapper[4603]: I0930 19:49:23.002336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/895a054c-b0e6-418a-9e96-b941b6e1946d-metrics-certs\") pod \"network-metrics-daemon-pwrc5\" (UID: \"895a054c-b0e6-418a-9e96-b941b6e1946d\") " pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:49:23 crc kubenswrapper[4603]: I0930 19:49:23.195441 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pwrc5" Sep 30 19:49:31 crc kubenswrapper[4603]: I0930 19:49:31.756100 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2d2ld" Sep 30 19:49:32 crc kubenswrapper[4603]: I0930 19:49:32.091689 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:32 crc kubenswrapper[4603]: I0930 19:49:32.094924 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:49:32 crc kubenswrapper[4603]: I0930 19:49:32.479757 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:49:38 crc kubenswrapper[4603]: I0930 19:49:38.441788 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:49:38 crc kubenswrapper[4603]: I0930 19:49:38.443361 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:49:41 crc kubenswrapper[4603]: I0930 19:49:41.903081 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x7f6d" Sep 30 19:49:47 crc kubenswrapper[4603]: I0930 19:49:47.674304 4603 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.349775 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.349936 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6bblz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-l582d_openshift-marketplace(2b72c703-50e4-40a8-8d48-f44937ac8f4a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.351241 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-l582d" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.430350 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.430819 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xbtk8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9k5ct_openshift-marketplace(29ec1b09-de04-46c7-aa54-32c9aff58a89): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:50 crc kubenswrapper[4603]: E0930 19:49:50.432042 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9k5ct" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" Sep 30 19:49:52 crc kubenswrapper[4603]: E0930 19:49:52.920081 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9k5ct" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" Sep 30 19:49:52 crc kubenswrapper[4603]: E0930 19:49:52.920278 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-l582d" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.020859 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.021645 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-62f6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mx7fc_openshift-marketplace(436ba6f1-35f4-4952-b793-c73d9585c715): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.023130 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mx7fc" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.047681 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.047870 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-slfv6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-mhq5r_openshift-marketplace(93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.049094 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-mhq5r" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.076739 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.076889 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w9wfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bsmjt_openshift-marketplace(96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:53 crc kubenswrapper[4603]: E0930 19:49:53.078107 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bsmjt" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.522116 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mx7fc" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.522141 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-bsmjt" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.522259 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-mhq5r" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.596805 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.597137 4603 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mb9pw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-v27rc_openshift-marketplace(eb6ecb74-a79b-41f9-802c-0f1bbc123df4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.598408 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-v27rc" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.609954 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.610304 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6bh62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xpm65_openshift-marketplace(0b36b6e6-0ac7-4444-82a8-9bfc58fd604a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 19:49:56 crc kubenswrapper[4603]: E0930 19:49:56.611606 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xpm65" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" Sep 30 19:49:56 crc kubenswrapper[4603]: I0930 19:49:56.934654 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pwrc5"] Sep 30 19:49:57 crc kubenswrapper[4603]: I0930 19:49:57.162825 4603 generic.go:334] "Generic (PLEG): container finished" podID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerID="60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f" exitCode=0 Sep 30 19:49:57 crc kubenswrapper[4603]: I0930 19:49:57.162975 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerDied","Data":"60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f"} Sep 30 19:49:57 crc kubenswrapper[4603]: I0930 19:49:57.164410 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" event={"ID":"895a054c-b0e6-418a-9e96-b941b6e1946d","Type":"ContainerStarted","Data":"86910734c25ecab0f7e94fcadf71f990a613328026698bc731bcad3a3f664cc0"} Sep 30 19:49:57 crc kubenswrapper[4603]: E0930 19:49:57.168605 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-v27rc" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" Sep 30 19:49:57 crc kubenswrapper[4603]: E0930 19:49:57.169053 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xpm65" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" Sep 30 19:49:58 crc kubenswrapper[4603]: I0930 19:49:58.177356 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerStarted","Data":"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659"} Sep 30 19:49:58 crc kubenswrapper[4603]: I0930 19:49:58.181031 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" event={"ID":"895a054c-b0e6-418a-9e96-b941b6e1946d","Type":"ContainerStarted","Data":"55e2c62623e2e9c57a20d3fcb1e782fbbd5430b813b837a3d7531c262aeaf93a"} Sep 30 19:49:58 crc kubenswrapper[4603]: I0930 19:49:58.181096 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pwrc5" event={"ID":"895a054c-b0e6-418a-9e96-b941b6e1946d","Type":"ContainerStarted","Data":"7ef4b9d5ba0255c3864769d50f4c4314248114ab91ef369aa5b42cdd6d154bc4"} Sep 30 19:49:58 crc kubenswrapper[4603]: I0930 19:49:58.209205 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8rxlc" podStartSLOduration=3.014458576 podStartE2EDuration="49.209156674s" podCreationTimestamp="2025-09-30 19:49:09 +0000 UTC" firstStartedPulling="2025-09-30 19:49:11.518280791 +0000 UTC m=+153.456739599" lastFinishedPulling="2025-09-30 19:49:57.712978849 +0000 UTC m=+199.651437697" observedRunningTime="2025-09-30 19:49:58.20513115 +0000 UTC m=+200.143590008" watchObservedRunningTime="2025-09-30 19:49:58.209156674 +0000 UTC m=+200.147615522" Sep 30 19:49:58 crc kubenswrapper[4603]: I0930 19:49:58.233925 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-pwrc5" podStartSLOduration=179.233898445 podStartE2EDuration="2m59.233898445s" podCreationTimestamp="2025-09-30 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:49:58.231443715 +0000 UTC m=+200.169902613" watchObservedRunningTime="2025-09-30 19:49:58.233898445 +0000 UTC m=+200.172357313" Sep 30 19:49:59 crc kubenswrapper[4603]: I0930 19:49:59.539593 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:49:59 crc kubenswrapper[4603]: I0930 19:49:59.539657 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:50:00 crc kubenswrapper[4603]: I0930 19:50:00.661335 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8rxlc" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="registry-server" probeResult="failure" output=< Sep 30 19:50:00 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 19:50:00 crc kubenswrapper[4603]: > Sep 30 19:50:07 crc kubenswrapper[4603]: I0930 19:50:07.236029 4603 generic.go:334] "Generic (PLEG): container finished" podID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerID="562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf" exitCode=0 Sep 30 19:50:07 crc kubenswrapper[4603]: I0930 19:50:07.236727 
4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerDied","Data":"562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf"} Sep 30 19:50:08 crc kubenswrapper[4603]: I0930 19:50:08.441786 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:50:08 crc kubenswrapper[4603]: I0930 19:50:08.442025 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:50:08 crc kubenswrapper[4603]: I0930 19:50:08.442069 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:50:08 crc kubenswrapper[4603]: I0930 19:50:08.442828 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:50:08 crc kubenswrapper[4603]: I0930 19:50:08.442920 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92" gracePeriod=600 Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.246584 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerStarted","Data":"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9"} Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.249943 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92" exitCode=0 Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.249974 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92"} Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.250030 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b"} Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.251525 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" 
event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerStarted","Data":"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55"} Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.253137 4603 generic.go:334] "Generic (PLEG): container finished" podID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerID="ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b" exitCode=0 Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.253194 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerDied","Data":"ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b"} Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.322313 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9k5ct" podStartSLOduration=3.189918368 podStartE2EDuration="1m0.322298949s" podCreationTimestamp="2025-09-30 19:49:09 +0000 UTC" firstStartedPulling="2025-09-30 19:49:11.48922284 +0000 UTC m=+153.427681658" lastFinishedPulling="2025-09-30 19:50:08.621603411 +0000 UTC m=+210.560062239" observedRunningTime="2025-09-30 19:50:09.318870442 +0000 UTC m=+211.257329260" watchObservedRunningTime="2025-09-30 19:50:09.322298949 +0000 UTC m=+211.260757767" Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.585829 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.626357 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.726391 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:50:09 crc kubenswrapper[4603]: I0930 19:50:09.726676 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.259964 4603 generic.go:334] "Generic (PLEG): container finished" podID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerID="1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd" exitCode=0 Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.260022 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerDied","Data":"1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd"} Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.268127 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerStarted","Data":"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9"} Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.270728 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerStarted","Data":"11bda2e23abce55298676758789de913b59bd2c4af55b6cc7986ede08e4faa06"} Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.272659 4603 generic.go:334] "Generic (PLEG): container finished" podID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" 
containerID="8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9" exitCode=0 Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.273077 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerDied","Data":"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9"} Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.334188 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l582d" podStartSLOduration=3.373824121 podStartE2EDuration="59.33417103s" podCreationTimestamp="2025-09-30 19:49:11 +0000 UTC" firstStartedPulling="2025-09-30 19:49:13.709302429 +0000 UTC m=+155.647761247" lastFinishedPulling="2025-09-30 19:50:09.669649338 +0000 UTC m=+211.608108156" observedRunningTime="2025-09-30 19:50:10.333621955 +0000 UTC m=+212.272080773" watchObservedRunningTime="2025-09-30 19:50:10.33417103 +0000 UTC m=+212.272629848" Sep 30 19:50:10 crc kubenswrapper[4603]: I0930 19:50:10.766153 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-9k5ct" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="registry-server" probeResult="failure" output=< Sep 30 19:50:10 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 19:50:10 crc kubenswrapper[4603]: > Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.281392 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerStarted","Data":"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283"} Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.283581 4603 generic.go:334] "Generic (PLEG): container finished" podID="436ba6f1-35f4-4952-b793-c73d9585c715" containerID="cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.283669 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerDied","Data":"cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f"} Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.285431 4603 generic.go:334] "Generic (PLEG): container finished" podID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerID="11bda2e23abce55298676758789de913b59bd2c4af55b6cc7986ede08e4faa06" exitCode=0 Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.285463 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerDied","Data":"11bda2e23abce55298676758789de913b59bd2c4af55b6cc7986ede08e4faa06"} Sep 30 19:50:11 crc kubenswrapper[4603]: I0930 19:50:11.312199 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v27rc" podStartSLOduration=3.41221413 podStartE2EDuration="59.312148992s" podCreationTimestamp="2025-09-30 19:49:12 +0000 UTC" firstStartedPulling="2025-09-30 19:49:14.838633944 +0000 UTC m=+156.777092762" lastFinishedPulling="2025-09-30 19:50:10.738568806 +0000 UTC m=+212.677027624" observedRunningTime="2025-09-30 19:50:11.310937888 +0000 UTC m=+213.249396716" watchObservedRunningTime="2025-09-30 19:50:11.312148992 +0000 UTC 
m=+213.250607850" Sep 30 19:50:12 crc kubenswrapper[4603]: I0930 19:50:12.100241 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:12 crc kubenswrapper[4603]: I0930 19:50:12.100303 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:12 crc kubenswrapper[4603]: I0930 19:50:12.159727 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:12 crc kubenswrapper[4603]: I0930 19:50:12.723795 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:50:12 crc kubenswrapper[4603]: I0930 19:50:12.724016 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.299533 4603 generic.go:334] "Generic (PLEG): container finished" podID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerID="90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631" exitCode=0 Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.299606 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerDied","Data":"90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631"} Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.303176 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerStarted","Data":"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9"} Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.305447 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerStarted","Data":"23d8d9a5e73ed635ed4d43d8973b6901e3566e720bdeed957d5e537d04241eec"} Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.310555 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerStarted","Data":"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248"} Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.338463 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mx7fc" podStartSLOduration=3.435463234 podStartE2EDuration="1m4.338444069s" podCreationTimestamp="2025-09-30 19:49:09 +0000 UTC" firstStartedPulling="2025-09-30 19:49:11.486229756 +0000 UTC m=+153.424688574" lastFinishedPulling="2025-09-30 19:50:12.389210581 +0000 UTC m=+214.327669409" observedRunningTime="2025-09-30 19:50:13.3374701 +0000 UTC m=+215.275928908" watchObservedRunningTime="2025-09-30 19:50:13.338444069 +0000 UTC m=+215.276902887" Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.359369 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bsmjt" podStartSLOduration=2.757986088 podStartE2EDuration="1m2.35935581s" podCreationTimestamp="2025-09-30 19:49:11 +0000 UTC" firstStartedPulling="2025-09-30 19:49:12.628200267 +0000 UTC m=+154.566659075" lastFinishedPulling="2025-09-30 
19:50:12.229569969 +0000 UTC m=+214.168028797" observedRunningTime="2025-09-30 19:50:13.355713168 +0000 UTC m=+215.294171986" watchObservedRunningTime="2025-09-30 19:50:13.35935581 +0000 UTC m=+215.297814618" Sep 30 19:50:13 crc kubenswrapper[4603]: I0930 19:50:13.759640 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v27rc" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="registry-server" probeResult="failure" output=< Sep 30 19:50:13 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 19:50:13 crc kubenswrapper[4603]: > Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.353932 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerStarted","Data":"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45"} Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.373542 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xpm65" podStartSLOduration=9.883471197 podStartE2EDuration="1m7.373522725s" podCreationTimestamp="2025-09-30 19:49:12 +0000 UTC" firstStartedPulling="2025-09-30 19:49:14.773676238 +0000 UTC m=+156.712135056" lastFinishedPulling="2025-09-30 19:50:12.263727756 +0000 UTC m=+214.202186584" observedRunningTime="2025-09-30 19:50:13.372863723 +0000 UTC m=+215.311322541" watchObservedRunningTime="2025-09-30 19:50:19.373522725 +0000 UTC m=+221.311981543" Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.786741 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.823528 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mhq5r" podStartSLOduration=3.825588552 podStartE2EDuration="1m10.823511801s" podCreationTimestamp="2025-09-30 19:49:09 +0000 UTC" firstStartedPulling="2025-09-30 19:49:11.509100142 +0000 UTC m=+153.447558960" lastFinishedPulling="2025-09-30 19:50:18.507023351 +0000 UTC m=+220.445482209" observedRunningTime="2025-09-30 19:50:19.374944046 +0000 UTC m=+221.313402874" watchObservedRunningTime="2025-09-30 19:50:19.823511801 +0000 UTC m=+221.761970629" Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.833134 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.981560 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:19 crc kubenswrapper[4603]: I0930 19:50:19.981612 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:20 crc kubenswrapper[4603]: I0930 19:50:20.026335 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:20 crc kubenswrapper[4603]: I0930 19:50:20.156710 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:50:20 crc kubenswrapper[4603]: I0930 19:50:20.156784 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 
19:50:20 crc kubenswrapper[4603]: I0930 19:50:20.420119 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:21 crc kubenswrapper[4603]: I0930 19:50:21.197768 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-mhq5r" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="registry-server" probeResult="failure" output=< Sep 30 19:50:21 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 19:50:21 crc kubenswrapper[4603]: > Sep 30 19:50:21 crc kubenswrapper[4603]: I0930 19:50:21.714709 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:50:21 crc kubenswrapper[4603]: I0930 19:50:21.714779 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:50:21 crc kubenswrapper[4603]: I0930 19:50:21.767771 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.148880 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.284688 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"] Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.376788 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mx7fc" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="registry-server" containerID="cri-o://9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9" gracePeriod=2 Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.441836 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.781291 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.812928 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.832299 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.971028 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content\") pod \"436ba6f1-35f4-4952-b793-c73d9585c715\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.971126 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities\") pod \"436ba6f1-35f4-4952-b793-c73d9585c715\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.971153 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62f6j\" (UniqueName: \"kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j\") pod \"436ba6f1-35f4-4952-b793-c73d9585c715\" (UID: \"436ba6f1-35f4-4952-b793-c73d9585c715\") " Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.971964 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities" (OuterVolumeSpecName: "utilities") pod "436ba6f1-35f4-4952-b793-c73d9585c715" (UID: "436ba6f1-35f4-4952-b793-c73d9585c715"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:22 crc kubenswrapper[4603]: I0930 19:50:22.977309 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j" (OuterVolumeSpecName: "kube-api-access-62f6j") pod "436ba6f1-35f4-4952-b793-c73d9585c715" (UID: "436ba6f1-35f4-4952-b793-c73d9585c715"). InnerVolumeSpecName "kube-api-access-62f6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.009421 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "436ba6f1-35f4-4952-b793-c73d9585c715" (UID: "436ba6f1-35f4-4952-b793-c73d9585c715"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.072574 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.072803 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/436ba6f1-35f4-4952-b793-c73d9585c715-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.072813 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62f6j\" (UniqueName: \"kubernetes.io/projected/436ba6f1-35f4-4952-b793-c73d9585c715-kube-api-access-62f6j\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.134421 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.135195 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.185791 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.384332 4603 generic.go:334] "Generic (PLEG): container finished" podID="436ba6f1-35f4-4952-b793-c73d9585c715" containerID="9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9" exitCode=0 Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.384398 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mx7fc" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.384476 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerDied","Data":"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9"} Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.384529 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mx7fc" event={"ID":"436ba6f1-35f4-4952-b793-c73d9585c715","Type":"ContainerDied","Data":"eed7ec8c53e747bb8465b348b1c96c9971f2983b61582fdfc85739d9997e34a5"} Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.384549 4603 scope.go:117] "RemoveContainer" containerID="9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.404782 4603 scope.go:117] "RemoveContainer" containerID="cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.412338 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"] Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.416703 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mx7fc"] Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.435882 4603 scope.go:117] "RemoveContainer" containerID="5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.435982 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.464090 4603 scope.go:117] "RemoveContainer" containerID="9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9" Sep 30 19:50:23 crc kubenswrapper[4603]: E0930 19:50:23.464793 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9\": container with ID starting with 9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9 not found: ID does not exist" containerID="9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.464823 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9"} err="failed to get container status \"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9\": rpc error: code = NotFound desc = could not find container \"9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9\": container with ID starting with 9b6c5d04233695365a1b548eb75fadada64032c12f1c70e4fb7376d595543ec9 not found: ID does not exist" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.464843 4603 scope.go:117] "RemoveContainer" containerID="cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f" Sep 30 19:50:23 crc kubenswrapper[4603]: E0930 19:50:23.465315 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f\": container with ID starting with 
cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f not found: ID does not exist" containerID="cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.465354 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f"} err="failed to get container status \"cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f\": rpc error: code = NotFound desc = could not find container \"cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f\": container with ID starting with cff6e591574b1faefecf765a92960fb642dd2470076efdc6d38045b9fa55b53f not found: ID does not exist" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.465379 4603 scope.go:117] "RemoveContainer" containerID="5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532" Sep 30 19:50:23 crc kubenswrapper[4603]: E0930 19:50:23.465652 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532\": container with ID starting with 5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532 not found: ID does not exist" containerID="5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532" Sep 30 19:50:23 crc kubenswrapper[4603]: I0930 19:50:23.465675 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532"} err="failed to get container status \"5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532\": rpc error: code = NotFound desc = could not find container \"5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532\": container with ID starting with 5c9cf7abd1059bc0b91e47857cd90ca710e29d94cf3bc5ef75f700290aaec532 not found: ID does not exist" Sep 30 19:50:24 crc kubenswrapper[4603]: I0930 19:50:24.677336 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:50:24 crc kubenswrapper[4603]: I0930 19:50:24.678077 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l582d" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="registry-server" containerID="cri-o://2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9" gracePeriod=2 Sep 30 19:50:24 crc kubenswrapper[4603]: I0930 19:50:24.772235 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" path="/var/lib/kubelet/pods/436ba6f1-35f4-4952-b793-c73d9585c715/volumes" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.034245 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.199609 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities\") pod \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.199880 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content\") pod \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.199929 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bblz\" (UniqueName: \"kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz\") pod \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\" (UID: \"2b72c703-50e4-40a8-8d48-f44937ac8f4a\") " Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.200387 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities" (OuterVolumeSpecName: "utilities") pod "2b72c703-50e4-40a8-8d48-f44937ac8f4a" (UID: "2b72c703-50e4-40a8-8d48-f44937ac8f4a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.205073 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz" (OuterVolumeSpecName: "kube-api-access-6bblz") pod "2b72c703-50e4-40a8-8d48-f44937ac8f4a" (UID: "2b72c703-50e4-40a8-8d48-f44937ac8f4a"). InnerVolumeSpecName "kube-api-access-6bblz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.213329 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b72c703-50e4-40a8-8d48-f44937ac8f4a" (UID: "2b72c703-50e4-40a8-8d48-f44937ac8f4a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.301407 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.301620 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b72c703-50e4-40a8-8d48-f44937ac8f4a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.301689 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bblz\" (UniqueName: \"kubernetes.io/projected/2b72c703-50e4-40a8-8d48-f44937ac8f4a-kube-api-access-6bblz\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.395468 4603 generic.go:334] "Generic (PLEG): container finished" podID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerID="2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9" exitCode=0 Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.395546 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l582d" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.395536 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerDied","Data":"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9"} Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.395877 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l582d" event={"ID":"2b72c703-50e4-40a8-8d48-f44937ac8f4a","Type":"ContainerDied","Data":"a117220a1e451736d76965a047a8d184c0b10043509d76d56cef4dc03b1cd8fe"} Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.395899 4603 scope.go:117] "RemoveContainer" containerID="2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.411894 4603 scope.go:117] "RemoveContainer" containerID="ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.425515 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.425572 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l582d"] Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.443241 4603 scope.go:117] "RemoveContainer" containerID="8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.462757 4603 scope.go:117] "RemoveContainer" containerID="2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9" Sep 30 19:50:25 crc kubenswrapper[4603]: E0930 19:50:25.463268 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9\": container with ID starting with 2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9 not found: ID does not exist" containerID="2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.463310 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9"} err="failed to get container status \"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9\": rpc error: code = NotFound desc = could not find container \"2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9\": container with ID starting with 2f65fa42d714c6190e4d0fc5effb8f051a31a71a88dfa17fba1ad0b8543f60a9 not found: ID does not exist" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.463337 4603 scope.go:117] "RemoveContainer" containerID="ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b" Sep 30 19:50:25 crc kubenswrapper[4603]: E0930 19:50:25.463747 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b\": container with ID starting with ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b not found: ID does not exist" containerID="ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.463789 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b"} err="failed to get container status \"ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b\": rpc error: code = NotFound desc = could not find container \"ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b\": container with ID starting with ab983398b0a0f7b9a0e1ef618a366c4b1d60c6a49ce7f6f1d33d9eced1d3296b not found: ID does not exist" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.463824 4603 scope.go:117] "RemoveContainer" containerID="8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1" Sep 30 19:50:25 crc kubenswrapper[4603]: E0930 19:50:25.464220 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1\": container with ID starting with 8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1 not found: ID does not exist" containerID="8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1" Sep 30 19:50:25 crc kubenswrapper[4603]: I0930 19:50:25.464252 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1"} err="failed to get container status \"8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1\": rpc error: code = NotFound desc = could not find container \"8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1\": container with ID starting with 8a7553b08191a3565e7e5b4e887cc746dd073e2c1af3b6c20dcfd00d76191ba1 not found: ID does not exist" Sep 30 19:50:26 crc kubenswrapper[4603]: I0930 19:50:26.770771 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" path="/var/lib/kubelet/pods/2b72c703-50e4-40a8-8d48-f44937ac8f4a/volumes" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.090984 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.091355 4603 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/redhat-operators-xpm65" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="registry-server" containerID="cri-o://23d8d9a5e73ed635ed4d43d8973b6901e3566e720bdeed957d5e537d04241eec" gracePeriod=2 Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.415369 4603 generic.go:334] "Generic (PLEG): container finished" podID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerID="23d8d9a5e73ed635ed4d43d8973b6901e3566e720bdeed957d5e537d04241eec" exitCode=0 Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.415453 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerDied","Data":"23d8d9a5e73ed635ed4d43d8973b6901e3566e720bdeed957d5e537d04241eec"} Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.568348 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.741152 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content\") pod \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.741259 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bh62\" (UniqueName: \"kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62\") pod \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.741279 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities\") pod \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\" (UID: \"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a\") " Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.742095 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities" (OuterVolumeSpecName: "utilities") pod "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" (UID: "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.747951 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62" (OuterVolumeSpecName: "kube-api-access-6bh62") pod "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" (UID: "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a"). InnerVolumeSpecName "kube-api-access-6bh62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.823687 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" (UID: "0b36b6e6-0ac7-4444-82a8-9bfc58fd604a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.843040 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.843061 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bh62\" (UniqueName: \"kubernetes.io/projected/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-kube-api-access-6bh62\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:27 crc kubenswrapper[4603]: I0930 19:50:27.843072 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.421341 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xpm65" event={"ID":"0b36b6e6-0ac7-4444-82a8-9bfc58fd604a","Type":"ContainerDied","Data":"3168d25d87d2665c914619172849aa22d599aa31061a044443f9068841b7a7b6"} Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.421385 4603 scope.go:117] "RemoveContainer" containerID="23d8d9a5e73ed635ed4d43d8973b6901e3566e720bdeed957d5e537d04241eec" Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.421478 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xpm65" Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.435669 4603 scope.go:117] "RemoveContainer" containerID="11bda2e23abce55298676758789de913b59bd2c4af55b6cc7986ede08e4faa06" Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.448050 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.451260 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xpm65"] Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.476323 4603 scope.go:117] "RemoveContainer" containerID="ba6f1fa3384c745ca09695b40ed829fe1167e5fd587062b751138fa33ec7061a" Sep 30 19:50:28 crc kubenswrapper[4603]: I0930 19:50:28.770205 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" path="/var/lib/kubelet/pods/0b36b6e6-0ac7-4444-82a8-9bfc58fd604a/volumes" Sep 30 19:50:30 crc kubenswrapper[4603]: I0930 19:50:30.195050 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:50:30 crc kubenswrapper[4603]: I0930 19:50:30.230278 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:50:30 crc kubenswrapper[4603]: I0930 19:50:30.874773 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mhq5r"] Sep 30 19:50:30 crc kubenswrapper[4603]: I0930 19:50:30.942087 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"] Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.435376 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mhq5r" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="registry-server" 
containerID="cri-o://a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45" gracePeriod=2 Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.799800 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.893585 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slfv6\" (UniqueName: \"kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6\") pod \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.893695 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content\") pod \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.893724 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities\") pod \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\" (UID: \"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8\") " Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.894521 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities" (OuterVolumeSpecName: "utilities") pod "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" (UID: "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.899773 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6" (OuterVolumeSpecName: "kube-api-access-slfv6") pod "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" (UID: "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8"). InnerVolumeSpecName "kube-api-access-slfv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.945562 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" (UID: "93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.995376 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slfv6\" (UniqueName: \"kubernetes.io/projected/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-kube-api-access-slfv6\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.995401 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:31 crc kubenswrapper[4603]: I0930 19:50:31.995413 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.441551 4603 generic.go:334] "Generic (PLEG): container finished" podID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerID="a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45" exitCode=0 Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.441596 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerDied","Data":"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45"} Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.441606 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mhq5r" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.441634 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mhq5r" event={"ID":"93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8","Type":"ContainerDied","Data":"5c51764a324019cf3b902cff626b77f844bfdf4bb3095f870da897ca90e1fdc7"} Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.441655 4603 scope.go:117] "RemoveContainer" containerID="a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.470355 4603 scope.go:117] "RemoveContainer" containerID="90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.482276 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mhq5r"] Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.485035 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mhq5r"] Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.499540 4603 scope.go:117] "RemoveContainer" containerID="9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.511397 4603 scope.go:117] "RemoveContainer" containerID="a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45" Sep 30 19:50:32 crc kubenswrapper[4603]: E0930 19:50:32.517082 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45\": container with ID starting with a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45 not found: ID does not exist" containerID="a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.517156 
4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45"} err="failed to get container status \"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45\": rpc error: code = NotFound desc = could not find container \"a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45\": container with ID starting with a3e209fc478ec1e34ff49af415fac17bb5e11b02d9cb45b7a15ecd487f37fa45 not found: ID does not exist" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.517234 4603 scope.go:117] "RemoveContainer" containerID="90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631" Sep 30 19:50:32 crc kubenswrapper[4603]: E0930 19:50:32.517627 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631\": container with ID starting with 90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631 not found: ID does not exist" containerID="90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.517666 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631"} err="failed to get container status \"90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631\": rpc error: code = NotFound desc = could not find container \"90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631\": container with ID starting with 90437c1c4ab0465dafd42fac0e54016674d7705101f6fe1861ae9eb283a7b631 not found: ID does not exist" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.517679 4603 scope.go:117] "RemoveContainer" containerID="9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b" Sep 30 19:50:32 crc kubenswrapper[4603]: E0930 19:50:32.518549 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b\": container with ID starting with 9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b not found: ID does not exist" containerID="9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.518589 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b"} err="failed to get container status \"9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b\": rpc error: code = NotFound desc = could not find container \"9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b\": container with ID starting with 9da46548d6e076808b5453ffd38614b266fada1199a24687c3fc0b5d5fb7774b not found: ID does not exist" Sep 30 19:50:32 crc kubenswrapper[4603]: I0930 19:50:32.771346 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" path="/var/lib/kubelet/pods/93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8/volumes" Sep 30 19:50:55 crc kubenswrapper[4603]: I0930 19:50:55.983894 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" 
containerID="cri-o://ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f" gracePeriod=15 Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.384601 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427028 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-jrtc8"] Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427337 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427358 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427378 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427391 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427407 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427421 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427440 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427451 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427466 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427477 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427495 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427507 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427527 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427538 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427560 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427572 4603 
state_mem.go:107] "Deleted CPUSet assignment" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427590 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427602 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427620 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427632 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427655 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427667 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="extract-utilities" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427681 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427693 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427712 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427727 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="extract-content" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.427750 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277e6095-4a7c-4355-a944-3f7179b8f06e" containerName="pruner" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.427910 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="277e6095-4a7c-4355-a944-3f7179b8f06e" containerName="pruner" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430302 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="93ba0bd6-e15b-47fb-8168-c77b4c4b4bb8" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430340 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b36b6e6-0ac7-4444-82a8-9bfc58fd604a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430360 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="277e6095-4a7c-4355-a944-3f7179b8f06e" containerName="pruner" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430378 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerName="oauth-openshift" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430399 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="436ba6f1-35f4-4952-b793-c73d9585c715" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 
19:50:56.430415 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b72c703-50e4-40a8-8d48-f44937ac8f4a" containerName="registry-server" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.430977 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.442796 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-jrtc8"] Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.521986 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.522045 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.522089 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.522148 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.522291 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.523724 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.523802 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.523849 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data\") pod 
\"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.523904 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.523957 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.524013 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.524051 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2729\" (UniqueName: \"kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.524112 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.524158 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error\") pod \"cfa60764-186a-4584-89f8-bb7df4bd2831\" (UID: \"cfa60764-186a-4584-89f8-bb7df4bd2831\") " Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.526951 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527128 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527286 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527469 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527664 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-dir\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527841 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.527973 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528198 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8q5z\" (UniqueName: \"kubernetes.io/projected/9783bf09-d95e-4f91-b510-eb5a8cee0255-kube-api-access-g8q5z\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528572 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528768 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.531651 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528286 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528934 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.528962 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.529192 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.529699 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.529733 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.530129 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.530403 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.531135 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.531673 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.531952 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.531890 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-policies\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.532934 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533091 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533432 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533555 4603 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533644 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533735 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533834 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.533940 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534033 4603 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfa60764-186a-4584-89f8-bb7df4bd2831-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534146 4603 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534262 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534399 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534488 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.534283 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.540199 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.543947 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729" (OuterVolumeSpecName: "kube-api-access-m2729") pod "cfa60764-186a-4584-89f8-bb7df4bd2831" (UID: "cfa60764-186a-4584-89f8-bb7df4bd2831"). InnerVolumeSpecName "kube-api-access-m2729". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.587005 4603 generic.go:334] "Generic (PLEG): container finished" podID="cfa60764-186a-4584-89f8-bb7df4bd2831" containerID="ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f" exitCode=0 Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.587053 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" event={"ID":"cfa60764-186a-4584-89f8-bb7df4bd2831","Type":"ContainerDied","Data":"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f"} Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.587079 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" event={"ID":"cfa60764-186a-4584-89f8-bb7df4bd2831","Type":"ContainerDied","Data":"0e15c170c248f3c7017f727819dc342081d11358b17504a72c2835b169c05025"} Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.587094 4603 scope.go:117] "RemoveContainer" containerID="ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.587232 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pz75" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.611540 4603 scope.go:117] "RemoveContainer" containerID="ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f" Sep 30 19:50:56 crc kubenswrapper[4603]: E0930 19:50:56.615410 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f\": container with ID starting with ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f not found: ID does not exist" containerID="ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.615499 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f"} err="failed to get container status \"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f\": rpc error: code = NotFound desc = could not find container \"ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f\": container with ID starting with ce20b82a5d2b002c2962b46c27d653a79e2da6d70fe00457ed4e54ef6eedca1f not found: ID does not exist" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.616981 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"] Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.621908 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pz75"] Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635417 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635447 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635482 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635501 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-dir\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635541 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635560 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635575 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8q5z\" (UniqueName: \"kubernetes.io/projected/9783bf09-d95e-4f91-b510-eb5a8cee0255-kube-api-access-g8q5z\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635601 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635622 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635636 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" 
(UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635651 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-policies\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635667 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635691 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635715 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635745 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2729\" (UniqueName: \"kubernetes.io/projected/cfa60764-186a-4584-89f8-bb7df4bd2831-kube-api-access-m2729\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635756 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.635768 4603 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfa60764-186a-4584-89f8-bb7df4bd2831-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.637137 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-dir\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.638411 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-service-ca\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.639092 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-error\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.639095 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-cliconfig\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.639959 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-router-certs\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.639973 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-audit-policies\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.641066 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.641724 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.642664 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-serving-cert\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.643016 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-login\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.644010 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.644317 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.645498 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9783bf09-d95e-4f91-b510-eb5a8cee0255-v4-0-config-system-session\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.651996 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8q5z\" (UniqueName: \"kubernetes.io/projected/9783bf09-d95e-4f91-b510-eb5a8cee0255-kube-api-access-g8q5z\") pod \"oauth-openshift-594b4c596d-jrtc8\" (UID: \"9783bf09-d95e-4f91-b510-eb5a8cee0255\") " pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.750354 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.773460 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfa60764-186a-4584-89f8-bb7df4bd2831" path="/var/lib/kubelet/pods/cfa60764-186a-4584-89f8-bb7df4bd2831/volumes" Sep 30 19:50:56 crc kubenswrapper[4603]: I0930 19:50:56.987703 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-594b4c596d-jrtc8"] Sep 30 19:50:57 crc kubenswrapper[4603]: I0930 19:50:57.593581 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" event={"ID":"9783bf09-d95e-4f91-b510-eb5a8cee0255","Type":"ContainerStarted","Data":"67672b9d48e980695fa38c73b6d652aba73905030905ce5f7a7e967b83bef9b1"} Sep 30 19:50:57 crc kubenswrapper[4603]: I0930 19:50:57.593631 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" event={"ID":"9783bf09-d95e-4f91-b510-eb5a8cee0255","Type":"ContainerStarted","Data":"d5c2e2c51d30092cc70385ce012ca3097a9eae24fb850f618c0d5eacb36c987e"} Sep 30 19:50:57 crc kubenswrapper[4603]: I0930 19:50:57.593934 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:50:57 crc kubenswrapper[4603]: I0930 19:50:57.623067 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" podStartSLOduration=27.623049581 podStartE2EDuration="27.623049581s" podCreationTimestamp="2025-09-30 19:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:50:57.620106487 +0000 UTC m=+259.558565305" watchObservedRunningTime="2025-09-30 19:50:57.623049581 +0000 UTC m=+259.561508389" Sep 30 19:50:57 crc kubenswrapper[4603]: I0930 19:50:57.875068 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-594b4c596d-jrtc8" Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.772916 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.773689 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8rxlc" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="registry-server" containerID="cri-o://21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.782636 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9k5ct"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.782848 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9k5ct" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="registry-server" containerID="cri-o://6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.792037 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.792375 4603 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" containerID="cri-o://86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.798864 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.799090 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bsmjt" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="registry-server" containerID="cri-o://55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.809283 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.809503 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v27rc" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="registry-server" containerID="cri-o://81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283" gracePeriod=30 Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.827155 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dg8pl"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.827765 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.846030 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dg8pl"] Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.915968 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.916033 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltrxf\" (UniqueName: \"kubernetes.io/projected/cc7471b5-5468-4585-a14c-dec890fce87f-kube-api-access-ltrxf\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:14 crc kubenswrapper[4603]: I0930 19:51:14.916101 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.017381 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.017437 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.017476 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltrxf\" (UniqueName: \"kubernetes.io/projected/cc7471b5-5468-4585-a14c-dec890fce87f-kube-api-access-ltrxf\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.019648 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.022880 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc7471b5-5468-4585-a14c-dec890fce87f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.035018 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltrxf\" (UniqueName: \"kubernetes.io/projected/cc7471b5-5468-4585-a14c-dec890fce87f-kube-api-access-ltrxf\") pod \"marketplace-operator-79b997595-dg8pl\" (UID: \"cc7471b5-5468-4585-a14c-dec890fce87f\") " pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.205371 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.217195 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.302608 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.327545 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities\") pod \"3dc3774c-3eac-4198-bef1-0463a48b7ece\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.327575 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh6wj\" (UniqueName: \"kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj\") pod \"3dc3774c-3eac-4198-bef1-0463a48b7ece\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.327655 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content\") pod \"3dc3774c-3eac-4198-bef1-0463a48b7ece\" (UID: \"3dc3774c-3eac-4198-bef1-0463a48b7ece\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.329622 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities" (OuterVolumeSpecName: "utilities") pod "3dc3774c-3eac-4198-bef1-0463a48b7ece" (UID: "3dc3774c-3eac-4198-bef1-0463a48b7ece"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.330805 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.338624 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj" (OuterVolumeSpecName: "kube-api-access-nh6wj") pod "3dc3774c-3eac-4198-bef1-0463a48b7ece" (UID: "3dc3774c-3eac-4198-bef1-0463a48b7ece"). InnerVolumeSpecName "kube-api-access-nh6wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.346850 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.372039 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.414254 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3dc3774c-3eac-4198-bef1-0463a48b7ece" (UID: "3dc3774c-3eac-4198-bef1-0463a48b7ece"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.428723 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics\") pod \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.428816 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbtk8\" (UniqueName: \"kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8\") pod \"29ec1b09-de04-46c7-aa54-32c9aff58a89\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.428849 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9wfl\" (UniqueName: \"kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl\") pod \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429432 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpbdg\" (UniqueName: \"kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg\") pod \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429455 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content\") pod \"29ec1b09-de04-46c7-aa54-32c9aff58a89\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429476 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities\") pod \"29ec1b09-de04-46c7-aa54-32c9aff58a89\" (UID: \"29ec1b09-de04-46c7-aa54-32c9aff58a89\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429494 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca\") pod \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\" (UID: \"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429675 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities\") pod \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.429714 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content\") pod \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\" (UID: \"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.430086 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-utilities\") on node \"crc\" 
DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.430103 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh6wj\" (UniqueName: \"kubernetes.io/projected/3dc3774c-3eac-4198-bef1-0463a48b7ece-kube-api-access-nh6wj\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.430113 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dc3774c-3eac-4198-bef1-0463a48b7ece-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.430714 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" (UID: "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.432317 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities" (OuterVolumeSpecName: "utilities") pod "29ec1b09-de04-46c7-aa54-32c9aff58a89" (UID: "29ec1b09-de04-46c7-aa54-32c9aff58a89"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.432407 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl" (OuterVolumeSpecName: "kube-api-access-w9wfl") pod "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" (UID: "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8"). InnerVolumeSpecName "kube-api-access-w9wfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.433220 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" (UID: "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.433269 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities" (OuterVolumeSpecName: "utilities") pod "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" (UID: "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.433684 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8" (OuterVolumeSpecName: "kube-api-access-xbtk8") pod "29ec1b09-de04-46c7-aa54-32c9aff58a89" (UID: "29ec1b09-de04-46c7-aa54-32c9aff58a89"). InnerVolumeSpecName "kube-api-access-xbtk8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.434244 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg" (OuterVolumeSpecName: "kube-api-access-tpbdg") pod "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" (UID: "9c1474bd-3ed9-4272-a0c7-0de2d695c3e6"). InnerVolumeSpecName "kube-api-access-tpbdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.444439 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" (UID: "96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.511343 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29ec1b09-de04-46c7-aa54-32c9aff58a89" (UID: "29ec1b09-de04-46c7-aa54-32c9aff58a89"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531188 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities\") pod \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531261 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content\") pod \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531309 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb9pw\" (UniqueName: \"kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw\") pod \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\" (UID: \"eb6ecb74-a79b-41f9-802c-0f1bbc123df4\") " Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531547 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531568 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531583 4603 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531640 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbtk8\" (UniqueName: \"kubernetes.io/projected/29ec1b09-de04-46c7-aa54-32c9aff58a89-kube-api-access-xbtk8\") on node 
\"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531652 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9wfl\" (UniqueName: \"kubernetes.io/projected/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8-kube-api-access-w9wfl\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531664 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpbdg\" (UniqueName: \"kubernetes.io/projected/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-kube-api-access-tpbdg\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531675 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531686 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ec1b09-de04-46c7-aa54-32c9aff58a89-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.531728 4603 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.532523 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities" (OuterVolumeSpecName: "utilities") pod "eb6ecb74-a79b-41f9-802c-0f1bbc123df4" (UID: "eb6ecb74-a79b-41f9-802c-0f1bbc123df4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.534426 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw" (OuterVolumeSpecName: "kube-api-access-mb9pw") pod "eb6ecb74-a79b-41f9-802c-0f1bbc123df4" (UID: "eb6ecb74-a79b-41f9-802c-0f1bbc123df4"). InnerVolumeSpecName "kube-api-access-mb9pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.609351 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb6ecb74-a79b-41f9-802c-0f1bbc123df4" (UID: "eb6ecb74-a79b-41f9-802c-0f1bbc123df4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.633255 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.633287 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.633299 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb9pw\" (UniqueName: \"kubernetes.io/projected/eb6ecb74-a79b-41f9-802c-0f1bbc123df4-kube-api-access-mb9pw\") on node \"crc\" DevicePath \"\"" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.682110 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dg8pl"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.687929 4603 generic.go:334] "Generic (PLEG): container finished" podID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerID="86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.687980 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" event={"ID":"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6","Type":"ContainerDied","Data":"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.688001 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" event={"ID":"9c1474bd-3ed9-4272-a0c7-0de2d695c3e6","Type":"ContainerDied","Data":"a8b3670ecb7f7de233d126fec13661d1775d7b985d4b5fb2927839878e6c78ac"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.688018 4603 scope.go:117] "RemoveContainer" containerID="86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.688067 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pq477" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.692674 4603 generic.go:334] "Generic (PLEG): container finished" podID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerID="21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.692746 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerDied","Data":"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.692771 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8rxlc" event={"ID":"3dc3774c-3eac-4198-bef1-0463a48b7ece","Type":"ContainerDied","Data":"9179dd4849629cd99256174305419e555545d357463fad99bcd0b74e56db2cf9"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.692822 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8rxlc" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.702071 4603 generic.go:334] "Generic (PLEG): container finished" podID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerID="81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.702125 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerDied","Data":"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.702149 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v27rc" event={"ID":"eb6ecb74-a79b-41f9-802c-0f1bbc123df4","Type":"ContainerDied","Data":"b5ff32a7d478eaf03b263d844499f993b5655fc7832bfaa4950c3600040f5e56"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.702235 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v27rc" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.711786 4603 scope.go:117] "RemoveContainer" containerID="86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.711861 4603 generic.go:334] "Generic (PLEG): container finished" podID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerID="6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.711889 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerDied","Data":"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.711928 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9k5ct" event={"ID":"29ec1b09-de04-46c7-aa54-32c9aff58a89","Type":"ContainerDied","Data":"23f419e335b88b9115e9aa9fa678833c1e9b92eb897dbaa7ba0a93bd89b42396"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.711952 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9k5ct" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.712059 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903\": container with ID starting with 86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903 not found: ID does not exist" containerID="86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.712099 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903"} err="failed to get container status \"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903\": rpc error: code = NotFound desc = could not find container \"86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903\": container with ID starting with 86f58f9ba07f4b73a92a15e712c360623ab7f5514aa6acacbbe91ddbcb016903 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.712124 4603 scope.go:117] "RemoveContainer" containerID="21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.719296 4603 generic.go:334] "Generic (PLEG): container finished" podID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerID="55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248" exitCode=0 Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.719346 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerDied","Data":"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.719377 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bsmjt" event={"ID":"96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8","Type":"ContainerDied","Data":"c80c40dee1a82a0253b1c4c4536f102ee3c25a1bc194df9ec3c6e308c25f0c47"} Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.719450 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bsmjt" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.723886 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.734462 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pq477"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.734976 4603 scope.go:117] "RemoveContainer" containerID="60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.748239 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.760992 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8rxlc"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.772557 4603 scope.go:117] "RemoveContainer" containerID="9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.791952 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9k5ct"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.801239 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9k5ct"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.805606 4603 scope.go:117] "RemoveContainer" containerID="21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.807085 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659\": container with ID starting with 21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659 not found: ID does not exist" containerID="21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.807645 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659"} err="failed to get container status \"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659\": rpc error: code = NotFound desc = could not find container \"21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659\": container with ID starting with 21fe905723b0b52038011bc6292169170da18da9adaf05580b28ed69321af659 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.807708 4603 scope.go:117] "RemoveContainer" containerID="60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.814586 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f\": container with ID starting with 60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f not found: ID does not exist" containerID="60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.814639 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f"} err="failed to get container status \"60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f\": rpc error: code = NotFound desc = could not find container \"60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f\": container with ID starting with 60817ec146c6e9daf12a1f9a6b45f2c6c602e045b2d610c3ff2739bdf24da53f not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.814661 4603 scope.go:117] "RemoveContainer" containerID="9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.815717 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738\": container with ID starting with 9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738 not found: ID does not exist" containerID="9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.815770 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738"} err="failed to get container status \"9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738\": rpc error: code = NotFound desc = could not find container \"9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738\": container with ID starting with 9c67fae12a5e11da8f94969406f02d7d3bb181f58d31b5a7ac4342d91cf47738 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.815801 4603 scope.go:117] "RemoveContainer" containerID="81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.823468 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.830148 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v27rc"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.833343 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.836236 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bsmjt"] Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.850317 4603 scope.go:117] "RemoveContainer" containerID="8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.873800 4603 scope.go:117] "RemoveContainer" containerID="7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.890369 4603 scope.go:117] "RemoveContainer" containerID="81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.890649 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283\": container with ID starting with 81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283 not found: ID does not exist" 
containerID="81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.890686 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283"} err="failed to get container status \"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283\": rpc error: code = NotFound desc = could not find container \"81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283\": container with ID starting with 81bae89f92ab850364ed17df7b9f7f50cbfbc05fc2e8f06018751281e1f43283 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.890714 4603 scope.go:117] "RemoveContainer" containerID="8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.891098 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9\": container with ID starting with 8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9 not found: ID does not exist" containerID="8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.891128 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9"} err="failed to get container status \"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9\": rpc error: code = NotFound desc = could not find container \"8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9\": container with ID starting with 8b324e69dcb495fd790a7e373f072af316e15d534514a24dd53571b7c22f59e9 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.891148 4603 scope.go:117] "RemoveContainer" containerID="7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.891456 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d\": container with ID starting with 7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d not found: ID does not exist" containerID="7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.891477 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d"} err="failed to get container status \"7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d\": rpc error: code = NotFound desc = could not find container \"7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d\": container with ID starting with 7ba67e10af974325ff3e140edef6d33d88d94afc7d5f617396775b7001487a3d not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.891493 4603 scope.go:117] "RemoveContainer" containerID="6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.939476 4603 scope.go:117] "RemoveContainer" containerID="562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 
19:51:15.951730 4603 scope.go:117] "RemoveContainer" containerID="bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.969593 4603 scope.go:117] "RemoveContainer" containerID="6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.970817 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55\": container with ID starting with 6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55 not found: ID does not exist" containerID="6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.970861 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55"} err="failed to get container status \"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55\": rpc error: code = NotFound desc = could not find container \"6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55\": container with ID starting with 6cc5b20f47e25b88fca1819e9733c2e29124600447df9d90334593b6a8015f55 not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.970891 4603 scope.go:117] "RemoveContainer" containerID="562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.971184 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf\": container with ID starting with 562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf not found: ID does not exist" containerID="562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.971216 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf"} err="failed to get container status \"562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf\": rpc error: code = NotFound desc = could not find container \"562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf\": container with ID starting with 562a9ba8553edb395f0ff7cdc5f4eec7316a6c5257deeaa9e3ba013c9a054fdf not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.971238 4603 scope.go:117] "RemoveContainer" containerID="bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e" Sep 30 19:51:15 crc kubenswrapper[4603]: E0930 19:51:15.971417 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e\": container with ID starting with bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e not found: ID does not exist" containerID="bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.971436 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e"} err="failed to get container status 
\"bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e\": rpc error: code = NotFound desc = could not find container \"bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e\": container with ID starting with bea6a11ff4ff82af45ad98f098a73c12f6fb9510918bcfec26c052ef6f81013e not found: ID does not exist" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.971450 4603 scope.go:117] "RemoveContainer" containerID="55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.982140 4603 scope.go:117] "RemoveContainer" containerID="1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd" Sep 30 19:51:15 crc kubenswrapper[4603]: I0930 19:51:15.992911 4603 scope.go:117] "RemoveContainer" containerID="7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.005705 4603 scope.go:117] "RemoveContainer" containerID="55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.006050 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248\": container with ID starting with 55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248 not found: ID does not exist" containerID="55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.006077 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248"} err="failed to get container status \"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248\": rpc error: code = NotFound desc = could not find container \"55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248\": container with ID starting with 55fff9772bf1c9fcffc73d06867b94d92f278160c1d0dd73e9f37a3cfc9d1248 not found: ID does not exist" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.006098 4603 scope.go:117] "RemoveContainer" containerID="1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.006666 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd\": container with ID starting with 1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd not found: ID does not exist" containerID="1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.006684 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd"} err="failed to get container status \"1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd\": rpc error: code = NotFound desc = could not find container \"1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd\": container with ID starting with 1d2cb39a8cec122cae165541ad6b15e781cb62fba02810f48692399b2ce431cd not found: ID does not exist" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.006696 4603 scope.go:117] "RemoveContainer" containerID="7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50" Sep 30 19:51:16 crc 
kubenswrapper[4603]: E0930 19:51:16.006986 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50\": container with ID starting with 7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50 not found: ID does not exist" containerID="7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.007004 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50"} err="failed to get container status \"7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50\": rpc error: code = NotFound desc = could not find container \"7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50\": container with ID starting with 7a7fe6f58a03a27ed48b5db569971cbf1cbcbbc4aeb01968a7749c66c90f2f50 not found: ID does not exist" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589638 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9n76h"] Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589841 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589856 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589866 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589875 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589884 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589893 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589904 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589911 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589922 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589930 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589941 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589948 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589959 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589967 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.589980 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.589988 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.590000 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590007 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.590019 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590026 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="extract-utilities" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.590036 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590043 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.590053 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590060 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: E0930 19:51:16.590073 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590080 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="extract-content" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590202 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590216 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590227 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590238 4603 
memory_manager.go:354] "RemoveStaleState removing state" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" containerName="registry-server" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590253 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" containerName="marketplace-operator" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.590942 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.592658 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.611847 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9n76h"] Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.728255 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" event={"ID":"cc7471b5-5468-4585-a14c-dec890fce87f","Type":"ContainerStarted","Data":"f63fe23398a8eeb8c687618311a83e4be39e062e49764994562fa2ed159476c8"} Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.728292 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" event={"ID":"cc7471b5-5468-4585-a14c-dec890fce87f","Type":"ContainerStarted","Data":"fcda47ab902b5c6472b645268701a38db7d022a0fae070412c772493c66def4b"} Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.729370 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.734249 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.745950 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-utilities\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.746014 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-catalog-content\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.746042 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg255\" (UniqueName: \"kubernetes.io/projected/bec44cb0-0ba0-4168-9bc6-96216f3266b7-kube-api-access-dg255\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.759152 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-dg8pl" podStartSLOduration=2.759137654 podStartE2EDuration="2.759137654s" podCreationTimestamp="2025-09-30 19:51:14 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:51:16.757003173 +0000 UTC m=+278.695461991" watchObservedRunningTime="2025-09-30 19:51:16.759137654 +0000 UTC m=+278.697596472" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.770814 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ec1b09-de04-46c7-aa54-32c9aff58a89" path="/var/lib/kubelet/pods/29ec1b09-de04-46c7-aa54-32c9aff58a89/volumes" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.771806 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc3774c-3eac-4198-bef1-0463a48b7ece" path="/var/lib/kubelet/pods/3dc3774c-3eac-4198-bef1-0463a48b7ece/volumes" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.772594 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8" path="/var/lib/kubelet/pods/96dfd0f4-18a6-4dd2-8cf8-935bbffcb2a8/volumes" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.774120 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c1474bd-3ed9-4272-a0c7-0de2d695c3e6" path="/var/lib/kubelet/pods/9c1474bd-3ed9-4272-a0c7-0de2d695c3e6/volumes" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.774754 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb6ecb74-a79b-41f9-802c-0f1bbc123df4" path="/var/lib/kubelet/pods/eb6ecb74-a79b-41f9-802c-0f1bbc123df4/volumes" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.848236 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-catalog-content\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.848318 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg255\" (UniqueName: \"kubernetes.io/projected/bec44cb0-0ba0-4168-9bc6-96216f3266b7-kube-api-access-dg255\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.848412 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-utilities\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.849612 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-catalog-content\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.850249 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bec44cb0-0ba0-4168-9bc6-96216f3266b7-utilities\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.872760 4603 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg255\" (UniqueName: \"kubernetes.io/projected/bec44cb0-0ba0-4168-9bc6-96216f3266b7-kube-api-access-dg255\") pod \"redhat-marketplace-9n76h\" (UID: \"bec44cb0-0ba0-4168-9bc6-96216f3266b7\") " pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:16 crc kubenswrapper[4603]: I0930 19:51:16.905447 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.313413 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9n76h"] Sep 30 19:51:17 crc kubenswrapper[4603]: W0930 19:51:17.317873 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbec44cb0_0ba0_4168_9bc6_96216f3266b7.slice/crio-850b65ed934d0dc985476f77530dd166f89708176e6669332029d91015027ab1 WatchSource:0}: Error finding container 850b65ed934d0dc985476f77530dd166f89708176e6669332029d91015027ab1: Status 404 returned error can't find the container with id 850b65ed934d0dc985476f77530dd166f89708176e6669332029d91015027ab1 Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.583145 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nmwg4"] Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.584481 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.586192 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.595067 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmwg4"] Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.737966 4603 generic.go:334] "Generic (PLEG): container finished" podID="bec44cb0-0ba0-4168-9bc6-96216f3266b7" containerID="e1c02096200274e1c8b2ec62e6a9f5c74be3339073db0f0c07f73ffc4d5a56ac" exitCode=0 Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.738030 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9n76h" event={"ID":"bec44cb0-0ba0-4168-9bc6-96216f3266b7","Type":"ContainerDied","Data":"e1c02096200274e1c8b2ec62e6a9f5c74be3339073db0f0c07f73ffc4d5a56ac"} Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.738083 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9n76h" event={"ID":"bec44cb0-0ba0-4168-9bc6-96216f3266b7","Type":"ContainerStarted","Data":"850b65ed934d0dc985476f77530dd166f89708176e6669332029d91015027ab1"} Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.759687 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-utilities\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.759762 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzvrj\" (UniqueName: \"kubernetes.io/projected/2f3491da-e6fc-4233-b117-34c80f1c2085-kube-api-access-pzvrj\") 
pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.759794 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-catalog-content\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.861029 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-utilities\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.861111 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzvrj\" (UniqueName: \"kubernetes.io/projected/2f3491da-e6fc-4233-b117-34c80f1c2085-kube-api-access-pzvrj\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.861178 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-catalog-content\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.861516 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-utilities\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.862207 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f3491da-e6fc-4233-b117-34c80f1c2085-catalog-content\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.889429 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzvrj\" (UniqueName: \"kubernetes.io/projected/2f3491da-e6fc-4233-b117-34c80f1c2085-kube-api-access-pzvrj\") pod \"certified-operators-nmwg4\" (UID: \"2f3491da-e6fc-4233-b117-34c80f1c2085\") " pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:17 crc kubenswrapper[4603]: I0930 19:51:17.900918 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.277745 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmwg4"] Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.746263 4603 generic.go:334] "Generic (PLEG): container finished" podID="bec44cb0-0ba0-4168-9bc6-96216f3266b7" containerID="705102c9ca21919064c3c738411b10a8c1fcfceb8ac76844f19bef85242c7da6" exitCode=0 Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.746557 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9n76h" event={"ID":"bec44cb0-0ba0-4168-9bc6-96216f3266b7","Type":"ContainerDied","Data":"705102c9ca21919064c3c738411b10a8c1fcfceb8ac76844f19bef85242c7da6"} Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.751640 4603 generic.go:334] "Generic (PLEG): container finished" podID="2f3491da-e6fc-4233-b117-34c80f1c2085" containerID="b478db41d22970a0221c57a16164725d414ece74b34777f5244b8817627e21c6" exitCode=0 Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.751788 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmwg4" event={"ID":"2f3491da-e6fc-4233-b117-34c80f1c2085","Type":"ContainerDied","Data":"b478db41d22970a0221c57a16164725d414ece74b34777f5244b8817627e21c6"} Sep 30 19:51:18 crc kubenswrapper[4603]: I0930 19:51:18.751822 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmwg4" event={"ID":"2f3491da-e6fc-4233-b117-34c80f1c2085","Type":"ContainerStarted","Data":"806d7285d6ac38778d540eb85b17aec14ffbadd52c1d07beb1329ee042d438d4"} Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.005243 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b9rn7"] Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.006868 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.009069 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.011693 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9rn7"] Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.185520 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-utilities\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.185555 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-992b8\" (UniqueName: \"kubernetes.io/projected/6e95bbaf-6456-4d51-ba65-63cb2948bf52-kube-api-access-992b8\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.185626 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-catalog-content\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.286290 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-utilities\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.286551 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-992b8\" (UniqueName: \"kubernetes.io/projected/6e95bbaf-6456-4d51-ba65-63cb2948bf52-kube-api-access-992b8\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.286589 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-catalog-content\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.286721 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-utilities\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.286915 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e95bbaf-6456-4d51-ba65-63cb2948bf52-catalog-content\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " 
pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.306930 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-992b8\" (UniqueName: \"kubernetes.io/projected/6e95bbaf-6456-4d51-ba65-63cb2948bf52-kube-api-access-992b8\") pod \"redhat-operators-b9rn7\" (UID: \"6e95bbaf-6456-4d51-ba65-63cb2948bf52\") " pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.329310 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.753780 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9rn7"] Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.760890 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9n76h" event={"ID":"bec44cb0-0ba0-4168-9bc6-96216f3266b7","Type":"ContainerStarted","Data":"548cb1bb7a55932ff58463acbb9863ac8ebe8f15d51c1759b9f95bafdf497113"} Sep 30 19:51:19 crc kubenswrapper[4603]: W0930 19:51:19.762740 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e95bbaf_6456_4d51_ba65_63cb2948bf52.slice/crio-1c4a0e23d3b2ce0a176c633a34d1c6728199970372b6cdadbfc43908da7c697f WatchSource:0}: Error finding container 1c4a0e23d3b2ce0a176c633a34d1c6728199970372b6cdadbfc43908da7c697f: Status 404 returned error can't find the container with id 1c4a0e23d3b2ce0a176c633a34d1c6728199970372b6cdadbfc43908da7c697f Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.763859 4603 generic.go:334] "Generic (PLEG): container finished" podID="2f3491da-e6fc-4233-b117-34c80f1c2085" containerID="221b350de1fac4bdf5b9146ea80eaa76e1ca8d2265fb6f477c4e4b8f62580afc" exitCode=0 Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.763889 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmwg4" event={"ID":"2f3491da-e6fc-4233-b117-34c80f1c2085","Type":"ContainerDied","Data":"221b350de1fac4bdf5b9146ea80eaa76e1ca8d2265fb6f477c4e4b8f62580afc"} Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.782462 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9n76h" podStartSLOduration=2.33952345 podStartE2EDuration="3.782440731s" podCreationTimestamp="2025-09-30 19:51:16 +0000 UTC" firstStartedPulling="2025-09-30 19:51:17.740055053 +0000 UTC m=+279.678513871" lastFinishedPulling="2025-09-30 19:51:19.182972334 +0000 UTC m=+281.121431152" observedRunningTime="2025-09-30 19:51:19.780373483 +0000 UTC m=+281.718832321" watchObservedRunningTime="2025-09-30 19:51:19.782440731 +0000 UTC m=+281.720899549" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.988818 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jxpv6"] Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.989930 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:19 crc kubenswrapper[4603]: I0930 19:51:19.995700 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.000065 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jxpv6"] Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.102865 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-utilities\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.102986 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg7ds\" (UniqueName: \"kubernetes.io/projected/b08e6044-208e-45ae-a648-665c6c96c0aa-kube-api-access-zg7ds\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.103018 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-catalog-content\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.204032 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-catalog-content\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.204696 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-utilities\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.204739 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg7ds\" (UniqueName: \"kubernetes.io/projected/b08e6044-208e-45ae-a648-665c6c96c0aa-kube-api-access-zg7ds\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.204628 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-catalog-content\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.205232 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b08e6044-208e-45ae-a648-665c6c96c0aa-utilities\") pod \"community-operators-jxpv6\" (UID: 
\"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.222372 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg7ds\" (UniqueName: \"kubernetes.io/projected/b08e6044-208e-45ae-a648-665c6c96c0aa-kube-api-access-zg7ds\") pod \"community-operators-jxpv6\" (UID: \"b08e6044-208e-45ae-a648-665c6c96c0aa\") " pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.311427 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.721066 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jxpv6"] Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.770491 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmwg4" event={"ID":"2f3491da-e6fc-4233-b117-34c80f1c2085","Type":"ContainerStarted","Data":"afcd5459c57c893efbebdd33494afac493ffc3b839ecfb29e420f5900ed935c8"} Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.775499 4603 generic.go:334] "Generic (PLEG): container finished" podID="6e95bbaf-6456-4d51-ba65-63cb2948bf52" containerID="be39c7b2886596a403f8d381edcac54c8920477fe5a3766cec1689ad7a886125" exitCode=0 Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.775576 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9rn7" event={"ID":"6e95bbaf-6456-4d51-ba65-63cb2948bf52","Type":"ContainerDied","Data":"be39c7b2886596a403f8d381edcac54c8920477fe5a3766cec1689ad7a886125"} Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.775610 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9rn7" event={"ID":"6e95bbaf-6456-4d51-ba65-63cb2948bf52","Type":"ContainerStarted","Data":"1c4a0e23d3b2ce0a176c633a34d1c6728199970372b6cdadbfc43908da7c697f"} Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.778394 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxpv6" event={"ID":"b08e6044-208e-45ae-a648-665c6c96c0aa","Type":"ContainerStarted","Data":"1f799396d59576ec2c13eaedbd7457b4e5370a8594d261760eaa911a42b43d95"} Sep 30 19:51:20 crc kubenswrapper[4603]: I0930 19:51:20.794336 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nmwg4" podStartSLOduration=2.280508193 podStartE2EDuration="3.794319663s" podCreationTimestamp="2025-09-30 19:51:17 +0000 UTC" firstStartedPulling="2025-09-30 19:51:18.755716521 +0000 UTC m=+280.694175339" lastFinishedPulling="2025-09-30 19:51:20.269527991 +0000 UTC m=+282.207986809" observedRunningTime="2025-09-30 19:51:20.792341247 +0000 UTC m=+282.730800065" watchObservedRunningTime="2025-09-30 19:51:20.794319663 +0000 UTC m=+282.732778481" Sep 30 19:51:21 crc kubenswrapper[4603]: I0930 19:51:21.783818 4603 generic.go:334] "Generic (PLEG): container finished" podID="b08e6044-208e-45ae-a648-665c6c96c0aa" containerID="8df3070750765276a028b27a04f1b678f6acb01ec7f26cf4bb479c662310fc08" exitCode=0 Sep 30 19:51:21 crc kubenswrapper[4603]: I0930 19:51:21.785758 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxpv6" 
event={"ID":"b08e6044-208e-45ae-a648-665c6c96c0aa","Type":"ContainerDied","Data":"8df3070750765276a028b27a04f1b678f6acb01ec7f26cf4bb479c662310fc08"} Sep 30 19:51:22 crc kubenswrapper[4603]: I0930 19:51:22.792030 4603 generic.go:334] "Generic (PLEG): container finished" podID="6e95bbaf-6456-4d51-ba65-63cb2948bf52" containerID="b64775203805956b57b4c9ad1d5340c1c52f0d95d455a5fe90ba3b9c98cd94d4" exitCode=0 Sep 30 19:51:22 crc kubenswrapper[4603]: I0930 19:51:22.792113 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9rn7" event={"ID":"6e95bbaf-6456-4d51-ba65-63cb2948bf52","Type":"ContainerDied","Data":"b64775203805956b57b4c9ad1d5340c1c52f0d95d455a5fe90ba3b9c98cd94d4"} Sep 30 19:51:24 crc kubenswrapper[4603]: I0930 19:51:24.805273 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9rn7" event={"ID":"6e95bbaf-6456-4d51-ba65-63cb2948bf52","Type":"ContainerStarted","Data":"28133def60f6c2d04574270759712347179600b56281ea54cdb705d93b2689ab"} Sep 30 19:51:24 crc kubenswrapper[4603]: I0930 19:51:24.807452 4603 generic.go:334] "Generic (PLEG): container finished" podID="b08e6044-208e-45ae-a648-665c6c96c0aa" containerID="f81657b2bd106239ef1e42189825bee2adacb61cf297038b960c24e2fb01deb0" exitCode=0 Sep 30 19:51:24 crc kubenswrapper[4603]: I0930 19:51:24.807499 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxpv6" event={"ID":"b08e6044-208e-45ae-a648-665c6c96c0aa","Type":"ContainerDied","Data":"f81657b2bd106239ef1e42189825bee2adacb61cf297038b960c24e2fb01deb0"} Sep 30 19:51:24 crc kubenswrapper[4603]: I0930 19:51:24.839946 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b9rn7" podStartSLOduration=4.378616321 podStartE2EDuration="6.839924927s" podCreationTimestamp="2025-09-30 19:51:18 +0000 UTC" firstStartedPulling="2025-09-30 19:51:20.776742718 +0000 UTC m=+282.715201536" lastFinishedPulling="2025-09-30 19:51:23.238051324 +0000 UTC m=+285.176510142" observedRunningTime="2025-09-30 19:51:24.823946577 +0000 UTC m=+286.762405395" watchObservedRunningTime="2025-09-30 19:51:24.839924927 +0000 UTC m=+286.778383755" Sep 30 19:51:26 crc kubenswrapper[4603]: I0930 19:51:26.822066 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxpv6" event={"ID":"b08e6044-208e-45ae-a648-665c6c96c0aa","Type":"ContainerStarted","Data":"edb86db20098f3a1f1a061ec8e71af737830c5a6aed8d8e667364236611669ad"} Sep 30 19:51:26 crc kubenswrapper[4603]: I0930 19:51:26.838377 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jxpv6" podStartSLOduration=3.861999556 podStartE2EDuration="7.838358587s" podCreationTimestamp="2025-09-30 19:51:19 +0000 UTC" firstStartedPulling="2025-09-30 19:51:21.790141703 +0000 UTC m=+283.728600521" lastFinishedPulling="2025-09-30 19:51:25.766500734 +0000 UTC m=+287.704959552" observedRunningTime="2025-09-30 19:51:26.836693 +0000 UTC m=+288.775151818" watchObservedRunningTime="2025-09-30 19:51:26.838358587 +0000 UTC m=+288.776817405" Sep 30 19:51:26 crc kubenswrapper[4603]: I0930 19:51:26.905770 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:26 crc kubenswrapper[4603]: I0930 19:51:26.905819 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:26 crc kubenswrapper[4603]: I0930 19:51:26.964628 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:27 crc kubenswrapper[4603]: I0930 19:51:27.868754 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9n76h" Sep 30 19:51:27 crc kubenswrapper[4603]: I0930 19:51:27.901811 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:27 crc kubenswrapper[4603]: I0930 19:51:27.901954 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:27 crc kubenswrapper[4603]: I0930 19:51:27.952485 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:28 crc kubenswrapper[4603]: I0930 19:51:28.865480 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nmwg4" Sep 30 19:51:29 crc kubenswrapper[4603]: I0930 19:51:29.330353 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:29 crc kubenswrapper[4603]: I0930 19:51:29.330395 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:29 crc kubenswrapper[4603]: I0930 19:51:29.385303 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:29 crc kubenswrapper[4603]: I0930 19:51:29.880689 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b9rn7" Sep 30 19:51:30 crc kubenswrapper[4603]: I0930 19:51:30.312197 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:30 crc kubenswrapper[4603]: I0930 19:51:30.312251 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:30 crc kubenswrapper[4603]: I0930 19:51:30.374609 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:51:30 crc kubenswrapper[4603]: I0930 19:51:30.892685 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jxpv6" Sep 30 19:52:08 crc kubenswrapper[4603]: I0930 19:52:08.441458 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:52:08 crc kubenswrapper[4603]: I0930 19:52:08.442286 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:52:38 crc kubenswrapper[4603]: I0930 19:52:38.442023 4603 patch_prober.go:28] interesting 
pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:52:38 crc kubenswrapper[4603]: I0930 19:52:38.442898 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:53:08 crc kubenswrapper[4603]: I0930 19:53:08.441944 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:53:08 crc kubenswrapper[4603]: I0930 19:53:08.442771 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:53:08 crc kubenswrapper[4603]: I0930 19:53:08.442836 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:53:08 crc kubenswrapper[4603]: I0930 19:53:08.443671 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:53:08 crc kubenswrapper[4603]: I0930 19:53:08.443806 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b" gracePeriod=600 Sep 30 19:53:09 crc kubenswrapper[4603]: I0930 19:53:09.444561 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b" exitCode=0 Sep 30 19:53:09 crc kubenswrapper[4603]: I0930 19:53:09.444857 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b"} Sep 30 19:53:09 crc kubenswrapper[4603]: I0930 19:53:09.445111 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765"} Sep 30 19:53:09 crc kubenswrapper[4603]: I0930 19:53:09.445278 4603 scope.go:117] "RemoveContainer" containerID="1cc6c3d716ff8460dfec317f4499e851670704b8c5a50aab71290a8af2e83d92" Sep 30 19:54:20 
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.688800 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-q9vwp"]
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.690620 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.706404 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-q9vwp"]
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832301 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwqv2\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-kube-api-access-lwqv2\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832365 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-tls\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832516 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-trusted-ca\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832549 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-certificates\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832665 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832690 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-ca-trust-extracted\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832735 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-installation-pull-secrets\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.832757 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-bound-sa-token\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.858148 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934569 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-trusted-ca\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934631 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-certificates\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934666 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-ca-trust-extracted\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934700 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-installation-pull-secrets\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934720 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-bound-sa-token\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934746 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwqv2\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-kube-api-access-lwqv2\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.934775 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-tls\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.935493 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-ca-trust-extracted\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.936739 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-trusted-ca\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.938954 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-certificates\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.948787 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-registry-tls\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.948919 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-installation-pull-secrets\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.953768 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwqv2\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-kube-api-access-lwqv2\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Sep 30 19:54:20 crc kubenswrapper[4603]: I0930 19:54:20.959694 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284-bound-sa-token\") pod \"image-registry-66df7c8f76-q9vwp\" (UID: \"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284\") " pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp"
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" Sep 30 19:54:21 crc kubenswrapper[4603]: I0930 19:54:21.269349 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-q9vwp"] Sep 30 19:54:21 crc kubenswrapper[4603]: W0930 19:54:21.275570 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c6cc996_b09c_4cd4_aaf4_e6bcd54c7284.slice/crio-e6ed743b0b9145b899c2884a7272c141c63538367ff2c4d26f6c08ccc47bd302 WatchSource:0}: Error finding container e6ed743b0b9145b899c2884a7272c141c63538367ff2c4d26f6c08ccc47bd302: Status 404 returned error can't find the container with id e6ed743b0b9145b899c2884a7272c141c63538367ff2c4d26f6c08ccc47bd302 Sep 30 19:54:21 crc kubenswrapper[4603]: I0930 19:54:21.892776 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" event={"ID":"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284","Type":"ContainerStarted","Data":"2c2429655715563c56b82f8de52f41424d108d5a8331dd78b5863bfe294879fa"} Sep 30 19:54:21 crc kubenswrapper[4603]: I0930 19:54:21.893092 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" Sep 30 19:54:21 crc kubenswrapper[4603]: I0930 19:54:21.893107 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" event={"ID":"9c6cc996-b09c-4cd4-aaf4-e6bcd54c7284","Type":"ContainerStarted","Data":"e6ed743b0b9145b899c2884a7272c141c63538367ff2c4d26f6c08ccc47bd302"} Sep 30 19:54:41 crc kubenswrapper[4603]: I0930 19:54:41.016666 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" Sep 30 19:54:41 crc kubenswrapper[4603]: I0930 19:54:41.056681 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-q9vwp" podStartSLOduration=21.056655446 podStartE2EDuration="21.056655446s" podCreationTimestamp="2025-09-30 19:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:54:21.921107464 +0000 UTC m=+463.859566312" watchObservedRunningTime="2025-09-30 19:54:41.056655446 +0000 UTC m=+482.995114264" Sep 30 19:54:41 crc kubenswrapper[4603]: I0930 19:54:41.116713 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"] Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.173673 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" podUID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" containerName="registry" containerID="cri-o://ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578" gracePeriod=30 Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.541955 4603 util.go:48] "No ready sandbox for pod can be found. 
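[annotation] The pod_startup_latency_tracker entry is simple arithmetic over the timestamps it prints: podStartE2EDuration is the watch-observed running time minus podCreationTimestamp, and podStartSLOduration additionally excludes image-pull time (zero here, since both pull timestamps are unset, so SLO equals E2E). Reproducing the logged 21.056655446s in Go, using only values copied from the entry above:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps copied from the log entry above.
        created, _ := time.Parse(time.RFC3339, "2025-09-30T19:54:20Z")
        observed, _ := time.Parse(time.RFC3339Nano, "2025-09-30T19:54:41.056655446Z")
        fmt.Println(observed.Sub(created)) // 21.056655446s, matching podStartE2EDuration
    }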
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.632997 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633047 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633125 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633278 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633308 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633341 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633398 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.633418 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptc25\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25\") pod \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\" (UID: \"a4af16ef-bdd1-4804-a11c-d9eda6ed782a\") " Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.634937 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.634992 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.643924 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.644000 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.644272 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25" (OuterVolumeSpecName: "kube-api-access-ptc25") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "kube-api-access-ptc25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.644988 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.651013 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.668471 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a4af16ef-bdd1-4804-a11c-d9eda6ed782a" (UID: "a4af16ef-bdd1-4804-a11c-d9eda6ed782a"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735065 4603 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735118 4603 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735141 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735160 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptc25\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-kube-api-access-ptc25\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735225 4603 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735243 4603 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:06 crc kubenswrapper[4603]: I0930 19:55:06.735260 4603 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4af16ef-bdd1-4804-a11c-d9eda6ed782a-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.187708 4603 generic.go:334] "Generic (PLEG): container finished" podID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" containerID="ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578" exitCode=0 Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.187783 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.187814 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" event={"ID":"a4af16ef-bdd1-4804-a11c-d9eda6ed782a","Type":"ContainerDied","Data":"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578"} Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.188271 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-jrnm6" event={"ID":"a4af16ef-bdd1-4804-a11c-d9eda6ed782a","Type":"ContainerDied","Data":"827a9154a4edee962a92944deb25ec0785e87d8132d8d0ccecee27f4f542208c"} Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.188310 4603 scope.go:117] "RemoveContainer" containerID="ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578" Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.219294 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"] Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.224670 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-jrnm6"] Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.233225 4603 scope.go:117] "RemoveContainer" containerID="ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578" Sep 30 19:55:07 crc kubenswrapper[4603]: E0930 19:55:07.234009 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578\": container with ID starting with ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578 not found: ID does not exist" containerID="ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578" Sep 30 19:55:07 crc kubenswrapper[4603]: I0930 19:55:07.234070 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578"} err="failed to get container status \"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578\": rpc error: code = NotFound desc = could not find container \"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578\": container with ID starting with ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578 not found: ID does not exist" Sep 30 19:55:08 crc kubenswrapper[4603]: I0930 19:55:08.441941 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:55:08 crc kubenswrapper[4603]: I0930 19:55:08.442018 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:55:08 crc kubenswrapper[4603]: I0930 19:55:08.774492 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" path="/var/lib/kubelet/pods/a4af16ef-bdd1-4804-a11c-d9eda6ed782a/volumes" Sep 30 19:55:38 crc 
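[annotation] The ContainerStatus/DeleteContainer error pair above is benign: the container was already removed on the first RemoveContainer pass, so the follow-up status query gets NotFound from CRI-O. Cleanup paths are written to be idempotent, treating "already gone" as success. A small Go sketch of that pattern using real gRPC status codes (the runtime call itself is a stand-in, not a CRI client):

    // Idempotent container removal: CRI's NotFound means "already deleted".
    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer wraps a stand-in runtime result; the NotFound
    // handling is the point here.
    func removeContainer(runtimeErr error) error {
        if status.Code(runtimeErr) == codes.NotFound {
            return nil // already gone: success for a cleanup path
        }
        return runtimeErr
    }

    func main() {
        gone := status.Error(codes.NotFound,
            "could not find container \"ec587b9f5ddb877de3e94abc03c464ff2f9637559b02195fa7ff8c6203dd2578\"")
        if err := removeContainer(gone); err != nil {
            fmt.Println("DeleteContainer returned error:", err)
        } else {
            fmt.Println("container already removed; nothing to do")
        }
    }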
Sep 30 19:55:38 crc kubenswrapper[4603]: I0930 19:55:38.441562 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:55:38 crc kubenswrapper[4603]: I0930 19:55:38.442074 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:56:08 crc kubenswrapper[4603]: I0930 19:56:08.441825 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 19:56:08 crc kubenswrapper[4603]: I0930 19:56:08.442331 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 19:56:08 crc kubenswrapper[4603]: I0930 19:56:08.442375 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x"
Sep 30 19:56:08 crc kubenswrapper[4603]: I0930 19:56:08.442891 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 19:56:08 crc kubenswrapper[4603]: I0930 19:56:08.442958 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765" gracePeriod=600
Sep 30 19:56:09 crc kubenswrapper[4603]: I0930 19:56:09.573834 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765" exitCode=0
Sep 30 19:56:09 crc kubenswrapper[4603]: I0930 19:56:09.573923 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765"}
Sep 30 19:56:09 crc kubenswrapper[4603]: I0930 19:56:09.574437 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087"}
containerID="3fc87123db778ffd8d2c7311b3ecccc004a76f1acae0d79160e3018213534e6b" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.723818 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-phxbq"] Sep 30 19:57:51 crc kubenswrapper[4603]: E0930 19:57:51.724534 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" containerName="registry" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.724547 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" containerName="registry" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.724631 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4af16ef-bdd1-4804-a11c-d9eda6ed782a" containerName="registry" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.724981 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.728020 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.728313 4603 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-8wmvx" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.728432 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.755460 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x4ppt"] Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.756196 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-x4ppt" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.758270 4603 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-xg7t2" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.759518 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-q7ttm"] Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.760512 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.763204 4603 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-gftlc" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.775328 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-q7ttm"] Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.779635 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x4ppt"] Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.791308 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-phxbq"] Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.849392 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vztj2\" (UniqueName: \"kubernetes.io/projected/b091df7c-bb72-483d-a232-76684ca02eeb-kube-api-access-vztj2\") pod \"cert-manager-cainjector-7f985d654d-phxbq\" (UID: \"b091df7c-bb72-483d-a232-76684ca02eeb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.951281 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9gp6\" (UniqueName: \"kubernetes.io/projected/02bbad49-bd1e-4b2a-bcaf-e87517081eab-kube-api-access-k9gp6\") pod \"cert-manager-webhook-5655c58dd6-q7ttm\" (UID: \"02bbad49-bd1e-4b2a-bcaf-e87517081eab\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.951317 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vztj2\" (UniqueName: \"kubernetes.io/projected/b091df7c-bb72-483d-a232-76684ca02eeb-kube-api-access-vztj2\") pod \"cert-manager-cainjector-7f985d654d-phxbq\" (UID: \"b091df7c-bb72-483d-a232-76684ca02eeb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.951352 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq46p\" (UniqueName: \"kubernetes.io/projected/9fd7e687-d7c9-4656-9665-491bbec118a0-kube-api-access-zq46p\") pod \"cert-manager-5b446d88c5-x4ppt\" (UID: \"9fd7e687-d7c9-4656-9665-491bbec118a0\") " pod="cert-manager/cert-manager-5b446d88c5-x4ppt" Sep 30 19:57:51 crc kubenswrapper[4603]: I0930 19:57:51.974463 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vztj2\" (UniqueName: \"kubernetes.io/projected/b091df7c-bb72-483d-a232-76684ca02eeb-kube-api-access-vztj2\") pod \"cert-manager-cainjector-7f985d654d-phxbq\" (UID: \"b091df7c-bb72-483d-a232-76684ca02eeb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.043649 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.052834 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9gp6\" (UniqueName: \"kubernetes.io/projected/02bbad49-bd1e-4b2a-bcaf-e87517081eab-kube-api-access-k9gp6\") pod \"cert-manager-webhook-5655c58dd6-q7ttm\" (UID: \"02bbad49-bd1e-4b2a-bcaf-e87517081eab\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.052913 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq46p\" (UniqueName: \"kubernetes.io/projected/9fd7e687-d7c9-4656-9665-491bbec118a0-kube-api-access-zq46p\") pod \"cert-manager-5b446d88c5-x4ppt\" (UID: \"9fd7e687-d7c9-4656-9665-491bbec118a0\") " pod="cert-manager/cert-manager-5b446d88c5-x4ppt" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.079872 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq46p\" (UniqueName: \"kubernetes.io/projected/9fd7e687-d7c9-4656-9665-491bbec118a0-kube-api-access-zq46p\") pod \"cert-manager-5b446d88c5-x4ppt\" (UID: \"9fd7e687-d7c9-4656-9665-491bbec118a0\") " pod="cert-manager/cert-manager-5b446d88c5-x4ppt" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.083914 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9gp6\" (UniqueName: \"kubernetes.io/projected/02bbad49-bd1e-4b2a-bcaf-e87517081eab-kube-api-access-k9gp6\") pod \"cert-manager-webhook-5655c58dd6-q7ttm\" (UID: \"02bbad49-bd1e-4b2a-bcaf-e87517081eab\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.219826 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-phxbq"] Sep 30 19:57:52 crc kubenswrapper[4603]: W0930 19:57:52.237556 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb091df7c_bb72_483d_a232_76684ca02eeb.slice/crio-54773765c96f3948f09287dfede0ae53d04a5f6291a1e0cf83c2057bbc8530f5 WatchSource:0}: Error finding container 54773765c96f3948f09287dfede0ae53d04a5f6291a1e0cf83c2057bbc8530f5: Status 404 returned error can't find the container with id 54773765c96f3948f09287dfede0ae53d04a5f6291a1e0cf83c2057bbc8530f5 Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.239357 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.370357 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-x4ppt" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.378630 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.590075 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x4ppt"] Sep 30 19:57:52 crc kubenswrapper[4603]: W0930 19:57:52.599771 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fd7e687_d7c9_4656_9665_491bbec118a0.slice/crio-2044abd59db4b949cdf53732a75ef6ae1c612fe178eaa66d0da10aba54d3a8e5 WatchSource:0}: Error finding container 2044abd59db4b949cdf53732a75ef6ae1c612fe178eaa66d0da10aba54d3a8e5: Status 404 returned error can't find the container with id 2044abd59db4b949cdf53732a75ef6ae1c612fe178eaa66d0da10aba54d3a8e5 Sep 30 19:57:52 crc kubenswrapper[4603]: I0930 19:57:52.641452 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-q7ttm"] Sep 30 19:57:52 crc kubenswrapper[4603]: W0930 19:57:52.644151 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02bbad49_bd1e_4b2a_bcaf_e87517081eab.slice/crio-a8ca33f57201117c38e2c83e5b5c124c0ab08da83fb54d90884c6224dcfe9fa4 WatchSource:0}: Error finding container a8ca33f57201117c38e2c83e5b5c124c0ab08da83fb54d90884c6224dcfe9fa4: Status 404 returned error can't find the container with id a8ca33f57201117c38e2c83e5b5c124c0ab08da83fb54d90884c6224dcfe9fa4 Sep 30 19:57:53 crc kubenswrapper[4603]: I0930 19:57:53.221492 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-x4ppt" event={"ID":"9fd7e687-d7c9-4656-9665-491bbec118a0","Type":"ContainerStarted","Data":"2044abd59db4b949cdf53732a75ef6ae1c612fe178eaa66d0da10aba54d3a8e5"} Sep 30 19:57:53 crc kubenswrapper[4603]: I0930 19:57:53.222612 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" event={"ID":"b091df7c-bb72-483d-a232-76684ca02eeb","Type":"ContainerStarted","Data":"54773765c96f3948f09287dfede0ae53d04a5f6291a1e0cf83c2057bbc8530f5"} Sep 30 19:57:53 crc kubenswrapper[4603]: I0930 19:57:53.224759 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" event={"ID":"02bbad49-bd1e-4b2a-bcaf-e87517081eab","Type":"ContainerStarted","Data":"a8ca33f57201117c38e2c83e5b5c124c0ab08da83fb54d90884c6224dcfe9fa4"} Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.245901 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" event={"ID":"b091df7c-bb72-483d-a232-76684ca02eeb","Type":"ContainerStarted","Data":"6989bf9b24ce1b35a07eaed8b9c1739cefed496c0cc9ed9d79d1fb97ad41de5d"} Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.247233 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" event={"ID":"02bbad49-bd1e-4b2a-bcaf-e87517081eab","Type":"ContainerStarted","Data":"0133f2d0b6ce7be1d785bd70f002812e4a161e6e30cb5e081abf7c7c3862f347"} Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.247594 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.249749 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-x4ppt" 
event={"ID":"9fd7e687-d7c9-4656-9665-491bbec118a0","Type":"ContainerStarted","Data":"256c3359250a9afe129bac0f1cba4ac5b0b6c388bc7b3ed9c243dabe75d6ce59"} Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.264540 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-phxbq" podStartSLOduration=1.588054182 podStartE2EDuration="5.264522044s" podCreationTimestamp="2025-09-30 19:57:51 +0000 UTC" firstStartedPulling="2025-09-30 19:57:52.239064026 +0000 UTC m=+674.177522844" lastFinishedPulling="2025-09-30 19:57:55.915531878 +0000 UTC m=+677.853990706" observedRunningTime="2025-09-30 19:57:56.263957588 +0000 UTC m=+678.202416446" watchObservedRunningTime="2025-09-30 19:57:56.264522044 +0000 UTC m=+678.202980882" Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.287436 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-x4ppt" podStartSLOduration=2.061186289 podStartE2EDuration="5.287420452s" podCreationTimestamp="2025-09-30 19:57:51 +0000 UTC" firstStartedPulling="2025-09-30 19:57:52.601904814 +0000 UTC m=+674.540363632" lastFinishedPulling="2025-09-30 19:57:55.828138977 +0000 UTC m=+677.766597795" observedRunningTime="2025-09-30 19:57:56.285754004 +0000 UTC m=+678.224212842" watchObservedRunningTime="2025-09-30 19:57:56.287420452 +0000 UTC m=+678.225879270" Sep 30 19:57:56 crc kubenswrapper[4603]: I0930 19:57:56.312880 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" podStartSLOduration=2.131724003 podStartE2EDuration="5.312862541s" podCreationTimestamp="2025-09-30 19:57:51 +0000 UTC" firstStartedPulling="2025-09-30 19:57:52.647018579 +0000 UTC m=+674.585477397" lastFinishedPulling="2025-09-30 19:57:55.828157117 +0000 UTC m=+677.766615935" observedRunningTime="2025-09-30 19:57:56.31178736 +0000 UTC m=+678.250246188" watchObservedRunningTime="2025-09-30 19:57:56.312862541 +0000 UTC m=+678.251321369" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.296890 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-blpqj"] Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298098 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-controller" containerID="cri-o://d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298648 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="sbdb" containerID="cri-o://89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298721 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="nbdb" containerID="cri-o://e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298779 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="northd" 
containerID="cri-o://10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298830 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298884 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-node" containerID="cri-o://a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.298935 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-acl-logging" containerID="cri-o://96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.381757 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-q7ttm" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.382421 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" containerID="cri-o://adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" gracePeriod=30 Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.677486 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/3.log" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.679322 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovn-acl-logging/0.log" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.679792 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovn-controller/0.log" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.680220 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694200 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694334 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkhtc\" (UniqueName: \"kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694278 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694486 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694557 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694664 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694782 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694875 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694961 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694667 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694695 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket" (OuterVolumeSpecName: "log-socket") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694849 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.694939 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695014 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695044 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695197 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695064 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695462 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695554 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695636 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695791 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695866 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695562 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695588 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695751 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log" (OuterVolumeSpecName: "node-log") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695848 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695941 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696084 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash" (OuterVolumeSpecName: "host-slash") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.695960 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696301 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696406 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696487 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696562 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: \"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696648 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd\") pod \"543e574d-42bb-453f-ade2-2e9b5904a3d3\" (UID: 
\"543e574d-42bb-453f-ade2-2e9b5904a3d3\") " Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.696797 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697025 4603 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697099 4603 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697145 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697215 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697160 4603 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697363 4603 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697429 4603 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697498 4603 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697569 4603 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697639 4603 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697704 4603 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697768 4603 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697844 4603 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.697921 4603 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.698004 4603 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.698089 4603 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.698162 4603 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-etc-openvswitch\") on node \"crc\" 
DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.699997 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.700096 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc" (OuterVolumeSpecName: "kube-api-access-nkhtc") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "kube-api-access-nkhtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.709522 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "543e574d-42bb-453f-ade2-2e9b5904a3d3" (UID: "543e574d-42bb-453f-ade2-2e9b5904a3d3"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734400 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2ntl5"] Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734650 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-acl-logging" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734665 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-acl-logging" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734675 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-node" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734682 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-node" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734691 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734697 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734705 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734711 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734717 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kubecfg-setup" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734723 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kubecfg-setup" Sep 30 19:58:02 crc 
kubenswrapper[4603]: E0930 19:58:02.734731 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734736 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734747 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="nbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734752 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="nbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734760 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734765 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734773 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734778 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734786 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="northd" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734791 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="northd" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734802 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734807 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734813 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="sbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734818 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="sbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: E0930 19:58:02.734827 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734833 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734920 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734926 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 
30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734933 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-node" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734941 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734948 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734956 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="sbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734965 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="nbdb" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734975 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="northd" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734982 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.734990 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovn-acl-logging" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.735142 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.735150 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerName="ovnkube-controller" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.736553 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799325 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-netns\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799363 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-bin\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799419 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-env-overrides\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799464 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-systemd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799482 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-ovn\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799499 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-var-lib-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799518 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799550 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-node-log\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799566 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-systemd-units\") pod 
\"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799582 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-netd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799597 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799635 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-kubelet\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799655 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-log-socket\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799675 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-script-lib\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799705 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-etc-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799720 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/501d5198-89f1-4012-a59f-db0b73271b2a-ovn-node-metrics-cert\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799736 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m54zz\" (UniqueName: \"kubernetes.io/projected/501d5198-89f1-4012-a59f-db0b73271b2a-kube-api-access-m54zz\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799752 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-slash\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799781 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-config\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799797 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799828 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkhtc\" (UniqueName: \"kubernetes.io/projected/543e574d-42bb-453f-ade2-2e9b5904a3d3-kube-api-access-nkhtc\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799855 4603 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/543e574d-42bb-453f-ade2-2e9b5904a3d3-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799865 4603 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/543e574d-42bb-453f-ade2-2e9b5904a3d3-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799873 4603 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.799881 4603 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/543e574d-42bb-453f-ade2-2e9b5904a3d3-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901044 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-log-socket\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901352 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-script-lib\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-etc-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901386 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/501d5198-89f1-4012-a59f-db0b73271b2a-ovn-node-metrics-cert\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901403 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m54zz\" (UniqueName: \"kubernetes.io/projected/501d5198-89f1-4012-a59f-db0b73271b2a-kube-api-access-m54zz\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901411 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-etc-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901437 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-slash\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901417 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-slash\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901471 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-config\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901495 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901522 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-netns\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901548 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-bin\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 
30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901573 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-env-overrides\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901608 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-systemd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901633 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-ovn\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901658 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-var-lib-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901251 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-log-socket\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901682 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901694 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-bin\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901702 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-systemd-units\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901724 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-node-log\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901749 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-netd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901772 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901801 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-kubelet\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.901865 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-kubelet\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902082 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902227 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-systemd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902235 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-config\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902276 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-netns\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902257 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902309 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-env-overrides\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902332 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-systemd-units\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902315 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-node-log\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902312 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-run-ovn\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902344 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-var-lib-openvswitch\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902358 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-cni-netd\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902387 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/501d5198-89f1-4012-a59f-db0b73271b2a-host-run-ovn-kubernetes\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.902545 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/501d5198-89f1-4012-a59f-db0b73271b2a-ovnkube-script-lib\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.905691 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/501d5198-89f1-4012-a59f-db0b73271b2a-ovn-node-metrics-cert\") pod \"ovnkube-node-2ntl5\" (UID: \"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:02 crc kubenswrapper[4603]: I0930 19:58:02.916314 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m54zz\" (UniqueName: \"kubernetes.io/projected/501d5198-89f1-4012-a59f-db0b73271b2a-kube-api-access-m54zz\") pod \"ovnkube-node-2ntl5\" (UID: 
\"501d5198-89f1-4012-a59f-db0b73271b2a\") " pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.051886 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:03 crc kubenswrapper[4603]: W0930 19:58:03.095200 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod501d5198_89f1_4012_a59f_db0b73271b2a.slice/crio-93ee72a858562b441c5383ca67f9ec85accca274c92f410ad38190731960f554 WatchSource:0}: Error finding container 93ee72a858562b441c5383ca67f9ec85accca274c92f410ad38190731960f554: Status 404 returned error can't find the container with id 93ee72a858562b441c5383ca67f9ec85accca274c92f410ad38190731960f554 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.305076 4603 generic.go:334] "Generic (PLEG): container finished" podID="501d5198-89f1-4012-a59f-db0b73271b2a" containerID="c13dfb4209d8d204239ea3b93ee4ff7599b12809a1d61dc632b9fe60f7319f2c" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.305190 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerDied","Data":"c13dfb4209d8d204239ea3b93ee4ff7599b12809a1d61dc632b9fe60f7319f2c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.305224 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"93ee72a858562b441c5383ca67f9ec85accca274c92f410ad38190731960f554"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.308470 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/2.log" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.308948 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/1.log" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.309001 4603 generic.go:334] "Generic (PLEG): container finished" podID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" containerID="637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b" exitCode=2 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.309052 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerDied","Data":"637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.309084 4603 scope.go:117] "RemoveContainer" containerID="6ad435e9c861c655be5b1f87891bb147fd5c24fb08b8b75596fec608e1fdfac8" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.309559 4603 scope.go:117] "RemoveContainer" containerID="637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.309755 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-6sgvc_openshift-multus(d0d17316-8ee1-4df6-98b6-eefa64f035d9)\"" pod="openshift-multus/multus-6sgvc" podUID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.315556 4603 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovnkube-controller/3.log" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.320990 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovn-acl-logging/0.log" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.321635 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-blpqj_543e574d-42bb-453f-ade2-2e9b5904a3d3/ovn-controller/0.log" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.322925 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.322981 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.322994 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323005 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323017 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323027 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" exitCode=0 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323037 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" exitCode=143 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323048 4603 generic.go:334] "Generic (PLEG): container finished" podID="543e574d-42bb-453f-ade2-2e9b5904a3d3" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" exitCode=143 Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323078 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323114 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323134 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" 
event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323151 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323189 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323207 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323228 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323244 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323253 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323263 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323272 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323281 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323290 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323300 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323309 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323318 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323330 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323348 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323361 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323371 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323380 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323389 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323397 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323408 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323418 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323426 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323435 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323447 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323464 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323475 4603 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323486 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323495 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323494 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.323503 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324267 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324287 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324295 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324303 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324310 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324329 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-blpqj" event={"ID":"543e574d-42bb-453f-ade2-2e9b5904a3d3","Type":"ContainerDied","Data":"82ef2d703e3e16d073fb16b641ebb1ce606e5c0797d94d6d0fccafa2b8eb230a"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324356 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324369 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324376 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324382 4603 pod_container_deletor.go:114] "Failed to issue the 
request to remove container" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324388 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324395 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324403 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324410 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324416 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.324423 4603 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.342951 4603 scope.go:117] "RemoveContainer" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.371503 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.387900 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-blpqj"] Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.400617 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-blpqj"] Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.403914 4603 scope.go:117] "RemoveContainer" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.425297 4603 scope.go:117] "RemoveContainer" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.439313 4603 scope.go:117] "RemoveContainer" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.454748 4603 scope.go:117] "RemoveContainer" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.479646 4603 scope.go:117] "RemoveContainer" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.496142 4603 scope.go:117] "RemoveContainer" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.526498 4603 scope.go:117] "RemoveContainer" 
containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.547642 4603 scope.go:117] "RemoveContainer" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.565502 4603 scope.go:117] "RemoveContainer" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.565910 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": container with ID starting with adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b not found: ID does not exist" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.565942 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} err="failed to get container status \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": rpc error: code = NotFound desc = could not find container \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": container with ID starting with adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.565962 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.566617 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": container with ID starting with 9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574 not found: ID does not exist" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.566643 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} err="failed to get container status \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": rpc error: code = NotFound desc = could not find container \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": container with ID starting with 9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.566657 4603 scope.go:117] "RemoveContainer" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.566910 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": container with ID starting with 89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e not found: ID does not exist" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.566939 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} err="failed to get container status \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": rpc error: code = NotFound desc = could not find container \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": container with ID starting with 89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.566956 4603 scope.go:117] "RemoveContainer" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.568421 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": container with ID starting with e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b not found: ID does not exist" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.568449 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} err="failed to get container status \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": rpc error: code = NotFound desc = could not find container \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": container with ID starting with e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.568466 4603 scope.go:117] "RemoveContainer" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.568708 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": container with ID starting with 10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff not found: ID does not exist" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.568736 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} err="failed to get container status \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": rpc error: code = NotFound desc = could not find container \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": container with ID starting with 10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.568753 4603 scope.go:117] "RemoveContainer" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.568994 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": container with ID starting with 34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1 not found: ID does not exist" 
containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569019 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} err="failed to get container status \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": rpc error: code = NotFound desc = could not find container \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": container with ID starting with 34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569035 4603 scope.go:117] "RemoveContainer" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.569293 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": container with ID starting with a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5 not found: ID does not exist" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569319 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} err="failed to get container status \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": rpc error: code = NotFound desc = could not find container \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": container with ID starting with a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569337 4603 scope.go:117] "RemoveContainer" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.569569 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": container with ID starting with 96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481 not found: ID does not exist" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569597 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} err="failed to get container status \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": rpc error: code = NotFound desc = could not find container \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": container with ID starting with 96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569619 4603 scope.go:117] "RemoveContainer" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.569841 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": container with ID starting with d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c not found: ID does not exist" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569867 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} err="failed to get container status \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": rpc error: code = NotFound desc = could not find container \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": container with ID starting with d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.569882 4603 scope.go:117] "RemoveContainer" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: E0930 19:58:03.570159 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": container with ID starting with 4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622 not found: ID does not exist" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570197 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} err="failed to get container status \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": rpc error: code = NotFound desc = could not find container \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": container with ID starting with 4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570214 4603 scope.go:117] "RemoveContainer" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570444 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} err="failed to get container status \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": rpc error: code = NotFound desc = could not find container \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": container with ID starting with adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570472 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570701 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} err="failed to get container status \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": rpc error: code = NotFound desc = could not find container \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": container with ID starting with 
9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570729 4603 scope.go:117] "RemoveContainer" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.570993 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} err="failed to get container status \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": rpc error: code = NotFound desc = could not find container \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": container with ID starting with 89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571020 4603 scope.go:117] "RemoveContainer" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571418 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} err="failed to get container status \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": rpc error: code = NotFound desc = could not find container \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": container with ID starting with e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571465 4603 scope.go:117] "RemoveContainer" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571672 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} err="failed to get container status \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": rpc error: code = NotFound desc = could not find container \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": container with ID starting with 10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571696 4603 scope.go:117] "RemoveContainer" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571907 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} err="failed to get container status \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": rpc error: code = NotFound desc = could not find container \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": container with ID starting with 34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.571930 4603 scope.go:117] "RemoveContainer" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572302 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} err="failed to get container status \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": rpc error: code = NotFound desc = could not find container \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": container with ID starting with a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572327 4603 scope.go:117] "RemoveContainer" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572570 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} err="failed to get container status \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": rpc error: code = NotFound desc = could not find container \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": container with ID starting with 96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572593 4603 scope.go:117] "RemoveContainer" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572795 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} err="failed to get container status \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": rpc error: code = NotFound desc = could not find container \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": container with ID starting with d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.572816 4603 scope.go:117] "RemoveContainer" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573039 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} err="failed to get container status \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": rpc error: code = NotFound desc = could not find container \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": container with ID starting with 4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573061 4603 scope.go:117] "RemoveContainer" containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573276 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} err="failed to get container status \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": rpc error: code = NotFound desc = could not find container \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": container with ID starting with adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b not found: ID does not exist" Sep 
30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573299 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573511 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} err="failed to get container status \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": rpc error: code = NotFound desc = could not find container \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": container with ID starting with 9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573534 4603 scope.go:117] "RemoveContainer" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573740 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} err="failed to get container status \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": rpc error: code = NotFound desc = could not find container \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": container with ID starting with 89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573763 4603 scope.go:117] "RemoveContainer" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.573981 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} err="failed to get container status \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": rpc error: code = NotFound desc = could not find container \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": container with ID starting with e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574005 4603 scope.go:117] "RemoveContainer" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574222 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} err="failed to get container status \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": rpc error: code = NotFound desc = could not find container \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": container with ID starting with 10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574244 4603 scope.go:117] "RemoveContainer" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574452 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} err="failed to get container status 
\"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": rpc error: code = NotFound desc = could not find container \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": container with ID starting with 34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574478 4603 scope.go:117] "RemoveContainer" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574702 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} err="failed to get container status \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": rpc error: code = NotFound desc = could not find container \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": container with ID starting with a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574728 4603 scope.go:117] "RemoveContainer" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574942 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} err="failed to get container status \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": rpc error: code = NotFound desc = could not find container \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": container with ID starting with 96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.574965 4603 scope.go:117] "RemoveContainer" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575156 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} err="failed to get container status \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": rpc error: code = NotFound desc = could not find container \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": container with ID starting with d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575290 4603 scope.go:117] "RemoveContainer" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575526 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} err="failed to get container status \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": rpc error: code = NotFound desc = could not find container \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": container with ID starting with 4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575547 4603 scope.go:117] "RemoveContainer" 
containerID="adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575806 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b"} err="failed to get container status \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": rpc error: code = NotFound desc = could not find container \"adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b\": container with ID starting with adf0795c9de51c61b41d28a7a38cbdc65dd28e5b1a6a672da11580aa12ef0e6b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.575832 4603 scope.go:117] "RemoveContainer" containerID="9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576044 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574"} err="failed to get container status \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": rpc error: code = NotFound desc = could not find container \"9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574\": container with ID starting with 9f777bd71c11cd51517fc43e78cf39533a840ad77c31b1f0739850bc5dcdd574 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576066 4603 scope.go:117] "RemoveContainer" containerID="89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576327 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e"} err="failed to get container status \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": rpc error: code = NotFound desc = could not find container \"89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e\": container with ID starting with 89a740e9a048bf40b3e2b8720232b0ddd386788342d8f1e4a2425a00329a350e not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576349 4603 scope.go:117] "RemoveContainer" containerID="e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576585 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b"} err="failed to get container status \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": rpc error: code = NotFound desc = could not find container \"e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b\": container with ID starting with e9e07c244d4d63427e2a488eb56db21af232c8190f15e29972dd1236318c928b not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.576606 4603 scope.go:117] "RemoveContainer" containerID="10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577122 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff"} err="failed to get container status \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": rpc error: code = NotFound desc = could not find 
container \"10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff\": container with ID starting with 10d4d00a2a83be3d2abf994c1a52f8c7d75472602c3728d9de53b7abf65c15ff not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577146 4603 scope.go:117] "RemoveContainer" containerID="34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577519 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1"} err="failed to get container status \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": rpc error: code = NotFound desc = could not find container \"34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1\": container with ID starting with 34a91d2bc615e4a3d9c77c283141f642255012b5fe02c2f4c9a969cbc64d96a1 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577546 4603 scope.go:117] "RemoveContainer" containerID="a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577855 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5"} err="failed to get container status \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": rpc error: code = NotFound desc = could not find container \"a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5\": container with ID starting with a3e971857ce71b02dc1e64708d249d913f7cb4879e811d72e313226a3af7c8c5 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.577878 4603 scope.go:117] "RemoveContainer" containerID="96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.578113 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481"} err="failed to get container status \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": rpc error: code = NotFound desc = could not find container \"96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481\": container with ID starting with 96f1da078725e08580bf1c7e73d807f740688628920d318f7dec62c4b2916481 not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.578155 4603 scope.go:117] "RemoveContainer" containerID="d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.578446 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c"} err="failed to get container status \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": rpc error: code = NotFound desc = could not find container \"d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c\": container with ID starting with d42b17a8877ba153458c66b046cd6b4e113c86af5f795595e9ff0b086401240c not found: ID does not exist" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.578470 4603 scope.go:117] "RemoveContainer" containerID="4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622" Sep 30 19:58:03 crc kubenswrapper[4603]: I0930 19:58:03.578858 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622"} err="failed to get container status \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": rpc error: code = NotFound desc = could not find container \"4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622\": container with ID starting with 4c17aba51b3c7e5ed4e7cfac8579792eedc4f183951788ea769c2c7c7a3e5622 not found: ID does not exist" Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336525 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"5e3b12eb7aa5456b37efe4d3621e4dd9a7988b4956cfbb22d1db4c8bae37f9a3"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336605 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"25e01db1860d2379a72b8565d782529495473a9c4ab35c05394dfd97433f6b54"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336632 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"152bc9ac1ad5a5f2869abb3c31e836e0f14ea0c6dc2a57a8cba5781ee7268ed7"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336651 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"3d83482ed9807a7d2282a439f03a1a4491a0d6f275b08954e53805d77dfc0748"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336686 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"ef3fbf759e07b67607dcc132c5876250aa448c9f85969dd03f5d35e98ee4c7f9"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.336705 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"5329a2735610943240bb9670dd137b19fb0094079728ff215094d1ac861f55d5"} Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.339627 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/2.log" Sep 30 19:58:04 crc kubenswrapper[4603]: I0930 19:58:04.777842 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="543e574d-42bb-453f-ade2-2e9b5904a3d3" path="/var/lib/kubelet/pods/543e574d-42bb-453f-ade2-2e9b5904a3d3/volumes" Sep 30 19:58:06 crc kubenswrapper[4603]: I0930 19:58:06.357306 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"8940be5209931f609c4d4dfdb82f8d7ec469abb31ac06999a7818e48586ba6f4"} Sep 30 19:58:08 crc kubenswrapper[4603]: I0930 19:58:08.441961 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Sep 30 19:58:08 crc kubenswrapper[4603]: I0930 19:58:08.442393 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.385141 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" event={"ID":"501d5198-89f1-4012-a59f-db0b73271b2a","Type":"ContainerStarted","Data":"dde7d62996ffbbb27ab72aa616eeb95f86632501cc64e8766d0552306b35f07a"} Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.385485 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.385506 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.385524 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.425813 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.427977 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:09 crc kubenswrapper[4603]: I0930 19:58:09.449140 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" podStartSLOduration=7.449125102 podStartE2EDuration="7.449125102s" podCreationTimestamp="2025-09-30 19:58:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:58:09.419644918 +0000 UTC m=+691.358103736" watchObservedRunningTime="2025-09-30 19:58:09.449125102 +0000 UTC m=+691.387583920" Sep 30 19:58:16 crc kubenswrapper[4603]: I0930 19:58:16.764060 4603 scope.go:117] "RemoveContainer" containerID="637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b" Sep 30 19:58:16 crc kubenswrapper[4603]: E0930 19:58:16.764974 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-6sgvc_openshift-multus(d0d17316-8ee1-4df6-98b6-eefa64f035d9)\"" pod="openshift-multus/multus-6sgvc" podUID="d0d17316-8ee1-4df6-98b6-eefa64f035d9" Sep 30 19:58:30 crc kubenswrapper[4603]: I0930 19:58:30.764455 4603 scope.go:117] "RemoveContainer" containerID="637d757e64d52ccf7f8b9b45163c2f56b35f6a43af3cab850ec72e24dae2f03b" Sep 30 19:58:31 crc kubenswrapper[4603]: I0930 19:58:31.528958 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6sgvc_d0d17316-8ee1-4df6-98b6-eefa64f035d9/kube-multus/2.log" Sep 30 19:58:31 crc kubenswrapper[4603]: I0930 19:58:31.529337 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6sgvc" event={"ID":"d0d17316-8ee1-4df6-98b6-eefa64f035d9","Type":"ContainerStarted","Data":"fbd59290e7cd67ed52e54a0f3061458f6118d0fdf2370458ffaf315b975fe1ca"} Sep 30 19:58:33 crc 
kubenswrapper[4603]: I0930 19:58:33.088081 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2ntl5" Sep 30 19:58:38 crc kubenswrapper[4603]: I0930 19:58:38.441687 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:58:38 crc kubenswrapper[4603]: I0930 19:58:38.442354 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.235912 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96"] Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.238748 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.241735 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.256784 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96"] Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.282362 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.282530 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdkx2\" (UniqueName: \"kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.282748 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.384673 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " 
pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.384784 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdkx2\" (UniqueName: \"kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.384898 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.385312 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.385632 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.411159 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdkx2\" (UniqueName: \"kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.563880 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:43 crc kubenswrapper[4603]: I0930 19:58:43.827649 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96"] Sep 30 19:58:44 crc kubenswrapper[4603]: I0930 19:58:44.610208 4603 generic.go:334] "Generic (PLEG): container finished" podID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerID="4ca1de6a5b254bf7e18320f756df593dbefcf15d5ae8016291429b17f07d549c" exitCode=0 Sep 30 19:58:44 crc kubenswrapper[4603]: I0930 19:58:44.610279 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" event={"ID":"bd351257-d9aa-4b23-ac84-d67cb081eee7","Type":"ContainerDied","Data":"4ca1de6a5b254bf7e18320f756df593dbefcf15d5ae8016291429b17f07d549c"} Sep 30 19:58:44 crc kubenswrapper[4603]: I0930 19:58:44.610625 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" event={"ID":"bd351257-d9aa-4b23-ac84-d67cb081eee7","Type":"ContainerStarted","Data":"d0081149634849e71a43dd291fd0469d313fef8fe62a2eebdd2efe888ac8108c"} Sep 30 19:58:46 crc kubenswrapper[4603]: I0930 19:58:46.628076 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" event={"ID":"bd351257-d9aa-4b23-ac84-d67cb081eee7","Type":"ContainerDied","Data":"e2e11c7b0f81ca3bc4717c264ba73e2b66511c23390525c6f19887f1bdf77f5d"} Sep 30 19:58:46 crc kubenswrapper[4603]: I0930 19:58:46.627877 4603 generic.go:334] "Generic (PLEG): container finished" podID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerID="e2e11c7b0f81ca3bc4717c264ba73e2b66511c23390525c6f19887f1bdf77f5d" exitCode=0 Sep 30 19:58:47 crc kubenswrapper[4603]: I0930 19:58:47.638759 4603 generic.go:334] "Generic (PLEG): container finished" podID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerID="b979f89073d4209f67b10bab5e8e9b60c6ee5b5d68a7752119fef7c84b921520" exitCode=0 Sep 30 19:58:47 crc kubenswrapper[4603]: I0930 19:58:47.638814 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" event={"ID":"bd351257-d9aa-4b23-ac84-d67cb081eee7","Type":"ContainerDied","Data":"b979f89073d4209f67b10bab5e8e9b60c6ee5b5d68a7752119fef7c84b921520"} Sep 30 19:58:48 crc kubenswrapper[4603]: I0930 19:58:48.968037 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.056780 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle\") pod \"bd351257-d9aa-4b23-ac84-d67cb081eee7\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.056908 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util\") pod \"bd351257-d9aa-4b23-ac84-d67cb081eee7\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.056951 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdkx2\" (UniqueName: \"kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2\") pod \"bd351257-d9aa-4b23-ac84-d67cb081eee7\" (UID: \"bd351257-d9aa-4b23-ac84-d67cb081eee7\") " Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.058606 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle" (OuterVolumeSpecName: "bundle") pod "bd351257-d9aa-4b23-ac84-d67cb081eee7" (UID: "bd351257-d9aa-4b23-ac84-d67cb081eee7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.064574 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2" (OuterVolumeSpecName: "kube-api-access-hdkx2") pod "bd351257-d9aa-4b23-ac84-d67cb081eee7" (UID: "bd351257-d9aa-4b23-ac84-d67cb081eee7"). InnerVolumeSpecName "kube-api-access-hdkx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.087787 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util" (OuterVolumeSpecName: "util") pod "bd351257-d9aa-4b23-ac84-d67cb081eee7" (UID: "bd351257-d9aa-4b23-ac84-d67cb081eee7"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.158424 4603 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-util\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.158465 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdkx2\" (UniqueName: \"kubernetes.io/projected/bd351257-d9aa-4b23-ac84-d67cb081eee7-kube-api-access-hdkx2\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.158484 4603 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bd351257-d9aa-4b23-ac84-d67cb081eee7-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.654743 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" event={"ID":"bd351257-d9aa-4b23-ac84-d67cb081eee7","Type":"ContainerDied","Data":"d0081149634849e71a43dd291fd0469d313fef8fe62a2eebdd2efe888ac8108c"} Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.654798 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0081149634849e71a43dd291fd0469d313fef8fe62a2eebdd2efe888ac8108c" Sep 30 19:58:49 crc kubenswrapper[4603]: I0930 19:58:49.655330 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.716457 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6"] Sep 30 19:58:50 crc kubenswrapper[4603]: E0930 19:58:50.716642 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="util" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.716652 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="util" Sep 30 19:58:50 crc kubenswrapper[4603]: E0930 19:58:50.716663 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="pull" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.716669 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="pull" Sep 30 19:58:50 crc kubenswrapper[4603]: E0930 19:58:50.716685 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="extract" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.716691 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="extract" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.716787 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd351257-d9aa-4b23-ac84-d67cb081eee7" containerName="extract" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.717134 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.718715 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-b86tk" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.719566 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.719813 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.736235 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6"] Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.777960 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jq5g\" (UniqueName: \"kubernetes.io/projected/bed1084e-39ed-437d-83ba-ae195cd14423-kube-api-access-2jq5g\") pod \"nmstate-operator-5d6f6cfd66-zlch6\" (UID: \"bed1084e-39ed-437d-83ba-ae195cd14423\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.878827 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jq5g\" (UniqueName: \"kubernetes.io/projected/bed1084e-39ed-437d-83ba-ae195cd14423-kube-api-access-2jq5g\") pod \"nmstate-operator-5d6f6cfd66-zlch6\" (UID: \"bed1084e-39ed-437d-83ba-ae195cd14423\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" Sep 30 19:58:50 crc kubenswrapper[4603]: I0930 19:58:50.899106 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jq5g\" (UniqueName: \"kubernetes.io/projected/bed1084e-39ed-437d-83ba-ae195cd14423-kube-api-access-2jq5g\") pod \"nmstate-operator-5d6f6cfd66-zlch6\" (UID: \"bed1084e-39ed-437d-83ba-ae195cd14423\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" Sep 30 19:58:51 crc kubenswrapper[4603]: I0930 19:58:51.029191 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" Sep 30 19:58:51 crc kubenswrapper[4603]: I0930 19:58:51.461156 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6"] Sep 30 19:58:51 crc kubenswrapper[4603]: I0930 19:58:51.664195 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" event={"ID":"bed1084e-39ed-437d-83ba-ae195cd14423","Type":"ContainerStarted","Data":"474cb57b292bc514acc3ee9d04767c4704fabd4bb44fb628cd9f3b78f93f247a"} Sep 30 19:58:54 crc kubenswrapper[4603]: I0930 19:58:54.682521 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" event={"ID":"bed1084e-39ed-437d-83ba-ae195cd14423","Type":"ContainerStarted","Data":"6335a548d1260c3ea372411f7ea84569a27be7ff12ddf0b5546a9f138ac41183"} Sep 30 19:58:54 crc kubenswrapper[4603]: I0930 19:58:54.707513 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-zlch6" podStartSLOduration=2.082245372 podStartE2EDuration="4.707486617s" podCreationTimestamp="2025-09-30 19:58:50 +0000 UTC" firstStartedPulling="2025-09-30 19:58:51.477353 +0000 UTC m=+733.415811838" lastFinishedPulling="2025-09-30 19:58:54.102594235 +0000 UTC m=+736.041053083" observedRunningTime="2025-09-30 19:58:54.70718967 +0000 UTC m=+736.645648538" watchObservedRunningTime="2025-09-30 19:58:54.707486617 +0000 UTC m=+736.645945465" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.747996 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-cpplb"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.749004 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" Sep 30 19:58:55 crc kubenswrapper[4603]: W0930 19:58:55.751241 4603 reflector.go:561] object-"openshift-nmstate"/"nmstate-handler-dockercfg-jq9cl": failed to list *v1.Secret: secrets "nmstate-handler-dockercfg-jq9cl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Sep 30 19:58:55 crc kubenswrapper[4603]: E0930 19:58:55.751283 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"nmstate-handler-dockercfg-jq9cl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"nmstate-handler-dockercfg-jq9cl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.770021 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.770758 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.774087 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.818686 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.833217 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-85hq5"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.833939 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.837881 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-cpplb"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.846263 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4ntj\" (UniqueName: \"kubernetes.io/projected/cc0f8a40-4ff0-47cd-be21-9b3659cad490-kube-api-access-q4ntj\") pod \"nmstate-metrics-58fcddf996-cpplb\" (UID: \"cc0f8a40-4ff0-47cd-be21-9b3659cad490\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.896235 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.896836 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.899875 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-k5fzz" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.901083 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.901087 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.913853 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd"] Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947377 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-dbus-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947435 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvthd\" (UniqueName: \"kubernetes.io/projected/e4caa662-2ce2-4110-bdd5-989f27772b4c-kube-api-access-hvthd\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947456 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: 
\"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-ovs-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947536 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947572 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4ntj\" (UniqueName: \"kubernetes.io/projected/cc0f8a40-4ff0-47cd-be21-9b3659cad490-kube-api-access-q4ntj\") pod \"nmstate-metrics-58fcddf996-cpplb\" (UID: \"cc0f8a40-4ff0-47cd-be21-9b3659cad490\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947616 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-nmstate-lock\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.947642 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjw64\" (UniqueName: \"kubernetes.io/projected/78744fb1-2861-4f48-ac88-15cc146d4602-kube-api-access-mjw64\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:55 crc kubenswrapper[4603]: I0930 19:58:55.964136 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4ntj\" (UniqueName: \"kubernetes.io/projected/cc0f8a40-4ff0-47cd-be21-9b3659cad490-kube-api-access-q4ntj\") pod \"nmstate-metrics-58fcddf996-cpplb\" (UID: \"cc0f8a40-4ff0-47cd-be21-9b3659cad490\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.048968 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjw64\" (UniqueName: \"kubernetes.io/projected/78744fb1-2861-4f48-ac88-15cc146d4602-kube-api-access-mjw64\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049026 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-dbus-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049061 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvthd\" (UniqueName: \"kubernetes.io/projected/e4caa662-2ce2-4110-bdd5-989f27772b4c-kube-api-access-hvthd\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049080 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-ovs-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049111 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049134 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bffd6d89-d0ee-4fff-b026-afada4f9ef81-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049183 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4zjt\" (UniqueName: \"kubernetes.io/projected/bffd6d89-d0ee-4fff-b026-afada4f9ef81-kube-api-access-t4zjt\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049203 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bffd6d89-d0ee-4fff-b026-afada4f9ef81-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049228 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-nmstate-lock\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049228 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-ovs-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049292 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-nmstate-lock\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: E0930 19:58:56.049374 4603 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.049404 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/78744fb1-2861-4f48-ac88-15cc146d4602-dbus-socket\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: E0930 19:58:56.049427 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair podName:e4caa662-2ce2-4110-bdd5-989f27772b4c nodeName:}" failed. No retries permitted until 2025-09-30 19:58:56.549410462 +0000 UTC m=+738.487869290 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair") pod "nmstate-webhook-6d689559c5-lcqr7" (UID: "e4caa662-2ce2-4110-bdd5-989f27772b4c") : secret "openshift-nmstate-webhook" not found Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.065391 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjw64\" (UniqueName: \"kubernetes.io/projected/78744fb1-2861-4f48-ac88-15cc146d4602-kube-api-access-mjw64\") pod \"nmstate-handler-85hq5\" (UID: \"78744fb1-2861-4f48-ac88-15cc146d4602\") " pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.066572 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvthd\" (UniqueName: \"kubernetes.io/projected/e4caa662-2ce2-4110-bdd5-989f27772b4c-kube-api-access-hvthd\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.143865 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-66bb954594-bxtjj"] Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.144489 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.149999 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bffd6d89-d0ee-4fff-b026-afada4f9ef81-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.150053 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4zjt\" (UniqueName: \"kubernetes.io/projected/bffd6d89-d0ee-4fff-b026-afada4f9ef81-kube-api-access-t4zjt\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.150072 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bffd6d89-d0ee-4fff-b026-afada4f9ef81-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.150841 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/bffd6d89-d0ee-4fff-b026-afada4f9ef81-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.154350 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/bffd6d89-d0ee-4fff-b026-afada4f9ef81-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.156515 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-66bb954594-bxtjj"] Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.206682 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4zjt\" (UniqueName: \"kubernetes.io/projected/bffd6d89-d0ee-4fff-b026-afada4f9ef81-kube-api-access-t4zjt\") pod \"nmstate-console-plugin-864bb6dfb5-zrpwd\" (UID: \"bffd6d89-d0ee-4fff-b026-afada4f9ef81\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.208032 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251319 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251383 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-console-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251412 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-trusted-ca-bundle\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251441 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt727\" (UniqueName: \"kubernetes.io/projected/e9a559b8-b120-4799-ac10-c5c2264889f3-kube-api-access-qt727\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251555 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-oauth-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251624 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-oauth-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.251673 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-service-ca\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353185 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353476 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" 
(UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-console-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353497 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-trusted-ca-bundle\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353548 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt727\" (UniqueName: \"kubernetes.io/projected/e9a559b8-b120-4799-ac10-c5c2264889f3-kube-api-access-qt727\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353583 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-oauth-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353606 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-oauth-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.353651 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-service-ca\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.354755 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-service-ca\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.355506 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-trusted-ca-bundle\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.355615 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-oauth-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.356420 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/e9a559b8-b120-4799-ac10-c5c2264889f3-console-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.361845 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-serving-cert\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.365221 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e9a559b8-b120-4799-ac10-c5c2264889f3-console-oauth-config\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.377181 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt727\" (UniqueName: \"kubernetes.io/projected/e9a559b8-b120-4799-ac10-c5c2264889f3-kube-api-access-qt727\") pod \"console-66bb954594-bxtjj\" (UID: \"e9a559b8-b120-4799-ac10-c5c2264889f3\") " pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.406976 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd"] Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.479865 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.556037 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.559883 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e4caa662-2ce2-4110-bdd5-989f27772b4c-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-lcqr7\" (UID: \"e4caa662-2ce2-4110-bdd5-989f27772b4c\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.697078 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" event={"ID":"bffd6d89-d0ee-4fff-b026-afada4f9ef81","Type":"ContainerStarted","Data":"04378e8211379e8de3b83b3810d19095980c22b253d7e7f37de30b4886214af3"} Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.753618 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-66bb954594-bxtjj"] Sep 30 19:58:56 crc kubenswrapper[4603]: W0930 19:58:56.765607 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9a559b8_b120_4799_ac10_c5c2264889f3.slice/crio-4d7a1cb55395899a98a1ab9b258a06245d2ae0d1a3137082101dd425bd18e642 WatchSource:0}: Error finding container 4d7a1cb55395899a98a1ab9b258a06245d2ae0d1a3137082101dd425bd18e642: Status 404 returned error can't find the container with id 
4d7a1cb55395899a98a1ab9b258a06245d2ae0d1a3137082101dd425bd18e642 Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.856926 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-jq9cl" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.858379 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.863720 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" Sep 30 19:58:56 crc kubenswrapper[4603]: I0930 19:58:56.866998 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.357377 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7"] Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.404136 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-cpplb"] Sep 30 19:58:57 crc kubenswrapper[4603]: W0930 19:58:57.414989 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc0f8a40_4ff0_47cd_be21_9b3659cad490.slice/crio-8a1396ec69419b109a83d4602c6dec16ae19d63d728b6b23c9698747f323bed1 WatchSource:0}: Error finding container 8a1396ec69419b109a83d4602c6dec16ae19d63d728b6b23c9698747f323bed1: Status 404 returned error can't find the container with id 8a1396ec69419b109a83d4602c6dec16ae19d63d728b6b23c9698747f323bed1 Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.705089 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" event={"ID":"e4caa662-2ce2-4110-bdd5-989f27772b4c","Type":"ContainerStarted","Data":"c5000dfb69bf50167f4cc3f1107cab9c55e18cde7eea1c8c40e06fbf7be25207"} Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.707552 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" event={"ID":"cc0f8a40-4ff0-47cd-be21-9b3659cad490","Type":"ContainerStarted","Data":"8a1396ec69419b109a83d4602c6dec16ae19d63d728b6b23c9698747f323bed1"} Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.709089 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-85hq5" event={"ID":"78744fb1-2861-4f48-ac88-15cc146d4602","Type":"ContainerStarted","Data":"514b95c5d010cdc37420fa4b1ec6dbb423ab9f41327f15bd779d8a13dabcff2e"} Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.710940 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66bb954594-bxtjj" event={"ID":"e9a559b8-b120-4799-ac10-c5c2264889f3","Type":"ContainerStarted","Data":"6aa0ffbe06f8fa924e89958da99c8f23bce9fb7d07cff165c14cc7edf499ed53"} Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.710987 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66bb954594-bxtjj" event={"ID":"e9a559b8-b120-4799-ac10-c5c2264889f3","Type":"ContainerStarted","Data":"4d7a1cb55395899a98a1ab9b258a06245d2ae0d1a3137082101dd425bd18e642"} Sep 30 19:58:57 crc kubenswrapper[4603]: I0930 19:58:57.736323 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-66bb954594-bxtjj" podStartSLOduration=1.73630351 
podStartE2EDuration="1.73630351s" podCreationTimestamp="2025-09-30 19:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:58:57.730581072 +0000 UTC m=+739.669039910" watchObservedRunningTime="2025-09-30 19:58:57.73630351 +0000 UTC m=+739.674762348" Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.728348 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-85hq5" event={"ID":"78744fb1-2861-4f48-ac88-15cc146d4602","Type":"ContainerStarted","Data":"d39c02761bee059cf0708acc8e0afaa254f51bac1ee14eee19ec84a4dfa0abc6"} Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.728970 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.732590 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" event={"ID":"e4caa662-2ce2-4110-bdd5-989f27772b4c","Type":"ContainerStarted","Data":"ee525c2e7f621ec041f65476f47362be5f0315e1bb70f78790a88586bcac6b5f"} Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.732887 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.734608 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" event={"ID":"cc0f8a40-4ff0-47cd-be21-9b3659cad490","Type":"ContainerStarted","Data":"76503c706b6c57bcd2a2f41e324a607d80c68a1e4c4927b8d316dcb5de6fc8d8"} Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.736468 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" event={"ID":"bffd6d89-d0ee-4fff-b026-afada4f9ef81","Type":"ContainerStarted","Data":"19637619c3cf5fff51ec5c0746d9386a4c8052e4738c9846bc1989469d15924b"} Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.750378 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-85hq5" podStartSLOduration=2.916123335 podStartE2EDuration="5.750359378s" podCreationTimestamp="2025-09-30 19:58:55 +0000 UTC" firstStartedPulling="2025-09-30 19:58:56.939990735 +0000 UTC m=+738.878449563" lastFinishedPulling="2025-09-30 19:58:59.774226788 +0000 UTC m=+741.712685606" observedRunningTime="2025-09-30 19:59:00.747712334 +0000 UTC m=+742.686171162" watchObservedRunningTime="2025-09-30 19:59:00.750359378 +0000 UTC m=+742.688818206" Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.769857 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-zrpwd" podStartSLOduration=2.430165195 podStartE2EDuration="5.769832708s" podCreationTimestamp="2025-09-30 19:58:55 +0000 UTC" firstStartedPulling="2025-09-30 19:58:56.427746481 +0000 UTC m=+738.366205299" lastFinishedPulling="2025-09-30 19:58:59.767413994 +0000 UTC m=+741.705872812" observedRunningTime="2025-09-30 19:59:00.767676706 +0000 UTC m=+742.706135534" watchObservedRunningTime="2025-09-30 19:59:00.769832708 +0000 UTC m=+742.708291536" Sep 30 19:59:00 crc kubenswrapper[4603]: I0930 19:59:00.798503 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" podStartSLOduration=3.37384421 podStartE2EDuration="5.79848368s" 
podCreationTimestamp="2025-09-30 19:58:55 +0000 UTC" firstStartedPulling="2025-09-30 19:58:57.373363843 +0000 UTC m=+739.311822701" lastFinishedPulling="2025-09-30 19:58:59.798003353 +0000 UTC m=+741.736462171" observedRunningTime="2025-09-30 19:59:00.795063648 +0000 UTC m=+742.733522486" watchObservedRunningTime="2025-09-30 19:59:00.79848368 +0000 UTC m=+742.736942508" Sep 30 19:59:02 crc kubenswrapper[4603]: I0930 19:59:02.757933 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" event={"ID":"cc0f8a40-4ff0-47cd-be21-9b3659cad490","Type":"ContainerStarted","Data":"c42a55bd7a07c71ffe108157840b13df4210927bf98d914f22238bbcbe8a8e82"} Sep 30 19:59:02 crc kubenswrapper[4603]: I0930 19:59:02.792222 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-cpplb" podStartSLOduration=2.756749374 podStartE2EDuration="7.792203451s" podCreationTimestamp="2025-09-30 19:58:55 +0000 UTC" firstStartedPulling="2025-09-30 19:58:57.420935882 +0000 UTC m=+739.359394710" lastFinishedPulling="2025-09-30 19:59:02.456389949 +0000 UTC m=+744.394848787" observedRunningTime="2025-09-30 19:59:02.789961497 +0000 UTC m=+744.728420325" watchObservedRunningTime="2025-09-30 19:59:02.792203451 +0000 UTC m=+744.730662289" Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.480111 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.480467 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.488143 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.793258 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-66bb954594-bxtjj" Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.861825 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"] Sep 30 19:59:06 crc kubenswrapper[4603]: I0930 19:59:06.915601 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-85hq5" Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.441946 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.442429 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.442497 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.443545 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.443652 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087" gracePeriod=600 Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.798199 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087"} Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.798199 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087" exitCode=0 Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.798561 4603 scope.go:117] "RemoveContainer" containerID="968b99b9f3191e45659637eeee4382e1ab0f100fdd267e7cb5561608e164c765" Sep 30 19:59:08 crc kubenswrapper[4603]: I0930 19:59:08.798599 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51"} Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.409422 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"] Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.409823 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerName="controller-manager" containerID="cri-o://768c26ced9482a59e237ae5a9d4617d2344553868f54767869b0f2a18ae0de69" gracePeriod=30 Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.494891 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"] Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.495119 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" containerID="cri-o://b5f9f88d869a740a7cb713958f64d042e43dee7af52e21cd99b639c3bfe818ed" gracePeriod=30 Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.820128 4603 generic.go:334] "Generic (PLEG): container finished" podID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerID="b5f9f88d869a740a7cb713958f64d042e43dee7af52e21cd99b639c3bfe818ed" exitCode=0 Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.820189 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" 
event={"ID":"c7503d65-f97f-45a5-94ec-9f210ea705c9","Type":"ContainerDied","Data":"b5f9f88d869a740a7cb713958f64d042e43dee7af52e21cd99b639c3bfe818ed"} Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.821808 4603 generic.go:334] "Generic (PLEG): container finished" podID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerID="768c26ced9482a59e237ae5a9d4617d2344553868f54767869b0f2a18ae0de69" exitCode=0 Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.822403 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" event={"ID":"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c","Type":"ContainerDied","Data":"768c26ced9482a59e237ae5a9d4617d2344553868f54767869b0f2a18ae0de69"} Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.899780 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:59:09 crc kubenswrapper[4603]: I0930 19:59:09.988590 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.039697 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2svcv\" (UniqueName: \"kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv\") pod \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.039802 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config\") pod \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.039846 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca\") pod \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.039866 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles\") pod \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.039924 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert\") pod \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\" (UID: \"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.040962 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca" (OuterVolumeSpecName: "client-ca") pod "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" (UID: "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.041064 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" (UID: "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.041434 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config" (OuterVolumeSpecName: "config") pod "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" (UID: "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.046737 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" (UID: "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.047281 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv" (OuterVolumeSpecName: "kube-api-access-2svcv") pod "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" (UID: "c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c"). InnerVolumeSpecName "kube-api-access-2svcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.140669 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config\") pod \"c7503d65-f97f-45a5-94ec-9f210ea705c9\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.140717 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert\") pod \"c7503d65-f97f-45a5-94ec-9f210ea705c9\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.140766 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") pod \"c7503d65-f97f-45a5-94ec-9f210ea705c9\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.140811 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjc6m\" (UniqueName: \"kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m\") pod \"c7503d65-f97f-45a5-94ec-9f210ea705c9\" (UID: \"c7503d65-f97f-45a5-94ec-9f210ea705c9\") " Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.141015 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.141026 4603 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2svcv\" (UniqueName: \"kubernetes.io/projected/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-kube-api-access-2svcv\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.141036 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.141044 4603 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.141052 4603 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.142464 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca" (OuterVolumeSpecName: "client-ca") pod "c7503d65-f97f-45a5-94ec-9f210ea705c9" (UID: "c7503d65-f97f-45a5-94ec-9f210ea705c9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.142608 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config" (OuterVolumeSpecName: "config") pod "c7503d65-f97f-45a5-94ec-9f210ea705c9" (UID: "c7503d65-f97f-45a5-94ec-9f210ea705c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.144770 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m" (OuterVolumeSpecName: "kube-api-access-kjc6m") pod "c7503d65-f97f-45a5-94ec-9f210ea705c9" (UID: "c7503d65-f97f-45a5-94ec-9f210ea705c9"). InnerVolumeSpecName "kube-api-access-kjc6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.144955 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c7503d65-f97f-45a5-94ec-9f210ea705c9" (UID: "c7503d65-f97f-45a5-94ec-9f210ea705c9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.241715 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.241749 4603 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7503d65-f97f-45a5-94ec-9f210ea705c9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.241760 4603 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7503d65-f97f-45a5-94ec-9f210ea705c9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.241770 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjc6m\" (UniqueName: \"kubernetes.io/projected/c7503d65-f97f-45a5-94ec-9f210ea705c9-kube-api-access-kjc6m\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.833615 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" event={"ID":"c7503d65-f97f-45a5-94ec-9f210ea705c9","Type":"ContainerDied","Data":"c5c5b1c7fb2fae907f8cecf30209fb8c119f5048b334e056ce1fbbc4cc07bd77"} Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.833684 4603 scope.go:117] "RemoveContainer" containerID="b5f9f88d869a740a7cb713958f64d042e43dee7af52e21cd99b639c3bfe818ed" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.833681 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.835816 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" event={"ID":"c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c","Type":"ContainerDied","Data":"ae9bb307b4fda4d8ac7ef0703efa9dc9b2bfa4e4fc740439a33e437e0ea91e8d"} Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.835883 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7wjwr" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.846927 4603 scope.go:117] "RemoveContainer" containerID="768c26ced9482a59e237ae5a9d4617d2344553868f54767869b0f2a18ae0de69" Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.860235 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"] Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.874687 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7wjwr"] Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.882039 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"] Sep 30 19:59:10 crc kubenswrapper[4603]: I0930 19:59:10.889480 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6lk6w"] Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.476795 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6696dc5b68-5zgqn"] Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.477045 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.477061 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.477084 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerName="controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.477091 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerName="controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.477236 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" containerName="controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.477250 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" containerName="route-controller-manager" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.477629 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.480253 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw"] Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.480985 4603 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.481031 4603 reflector.go:561] object-"openshift-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.481063 4603 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.481119 4603 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.481107 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.481126 4603 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.481213 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.481055 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.481107 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.481143 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.481038 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.484714 4603 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.484783 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.484822 4603 reflector.go:561] object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2": failed to list *v1.Secret: secrets "route-controller-manager-sa-dockercfg-h2zr2" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.484860 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"route-controller-manager-sa-dockercfg-h2zr2\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"route-controller-manager-sa-dockercfg-h2zr2\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.484963 4603 reflector.go:561] 
object-"openshift-route-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.484986 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.485016 4603 reflector.go:561] object-"openshift-route-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.485044 4603 reflector.go:561] object-"openshift-route-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.485049 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.485063 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.485127 4603 reflector.go:561] object-"openshift-route-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.485136 4603 reflector.go:561] object-"openshift-route-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.485158 4603 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-route-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.485159 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: W0930 19:59:11.487285 4603 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Sep 30 19:59:11 crc kubenswrapper[4603]: E0930 19:59:11.487343 4603 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.507945 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6696dc5b68-5zgqn"] Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.516442 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw"] Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658535 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz272\" (UniqueName: \"kubernetes.io/projected/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-kube-api-access-hz272\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658595 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/522eb868-b835-4cea-9feb-dd4416ace6cc-serving-cert\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658650 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-client-ca\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 
19:59:11.658685 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-config\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658705 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-config\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658727 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-proxy-ca-bundles\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658749 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52wzh\" (UniqueName: \"kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658780 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-serving-cert\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.658803 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.760480 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz272\" (UniqueName: \"kubernetes.io/projected/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-kube-api-access-hz272\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761003 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/522eb868-b835-4cea-9feb-dd4416ace6cc-serving-cert\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761220 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-client-ca\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761272 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-config\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761303 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-config\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761333 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-proxy-ca-bundles\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761363 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52wzh\" (UniqueName: \"kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761419 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-serving-cert\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:11 crc kubenswrapper[4603]: I0930 19:59:11.761453 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.434400 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.456454 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/522eb868-b835-4cea-9feb-dd4416ace6cc-serving-cert\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.471834 4603 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.478286 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-serving-cert\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.497729 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.581392 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.583056 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.593292 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-client-ca\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.630546 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.632781 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-proxy-ca-bundles\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.677757 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.691896 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz272\" (UniqueName: \"kubernetes.io/projected/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-kube-api-access-hz272\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.719902 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.720684 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.722896 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-config\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" 
Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.749780 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.752858 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d389ea48-dc8c-43da-9bbf-e7d80eedfda6-config\") pod \"route-controller-manager-6746fd7dd4-bsxxw\" (UID: \"d389ea48-dc8c-43da-9bbf-e7d80eedfda6\") " pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:12 crc kubenswrapper[4603]: E0930 19:59:12.762493 4603 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:59:12 crc kubenswrapper[4603]: E0930 19:59:12.762608 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca podName:522eb868-b835-4cea-9feb-dd4416ace6cc nodeName:}" failed. No retries permitted until 2025-09-30 19:59:13.262582244 +0000 UTC m=+755.201041092 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca") pod "controller-manager-6696dc5b68-5zgqn" (UID: "522eb868-b835-4cea-9feb-dd4416ace6cc") : failed to sync configmap cache: timed out waiting for the condition Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.773151 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c" path="/var/lib/kubelet/pods/c21bde8a-fbf8-41cc-9ee1-9a0a02305e6c/volumes" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.774050 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7503d65-f97f-45a5-94ec-9f210ea705c9" path="/var/lib/kubelet/pods/c7503d65-f97f-45a5-94ec-9f210ea705c9/volumes" Sep 30 19:59:12 crc kubenswrapper[4603]: E0930 19:59:12.785813 4603 projected.go:288] Couldn't get configMap openshift-controller-manager/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:59:12 crc kubenswrapper[4603]: E0930 19:59:12.785894 4603 projected.go:194] Error preparing data for projected volume kube-api-access-52wzh for pod openshift-controller-manager/controller-manager-6696dc5b68-5zgqn: failed to sync configmap cache: timed out waiting for the condition Sep 30 19:59:12 crc kubenswrapper[4603]: E0930 19:59:12.785950 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh podName:522eb868-b835-4cea-9feb-dd4416ace6cc nodeName:}" failed. No retries permitted until 2025-09-30 19:59:13.285930488 +0000 UTC m=+755.224389316 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-52wzh" (UniqueName: "kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh") pod "controller-manager-6696dc5b68-5zgqn" (UID: "522eb868-b835-4cea-9feb-dd4416ace6cc") : failed to sync configmap cache: timed out waiting for the condition Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.786019 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.873917 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 19:59:12 crc kubenswrapper[4603]: I0930 19:59:12.961110 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.024188 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.279417 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.280213 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/522eb868-b835-4cea-9feb-dd4416ace6cc-client-ca\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.380529 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52wzh\" (UniqueName: \"kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.387699 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52wzh\" (UniqueName: \"kubernetes.io/projected/522eb868-b835-4cea-9feb-dd4416ace6cc-kube-api-access-52wzh\") pod \"controller-manager-6696dc5b68-5zgqn\" (UID: \"522eb868-b835-4cea-9feb-dd4416ace6cc\") " pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.455660 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw"] Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.616047 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.858756 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" event={"ID":"d389ea48-dc8c-43da-9bbf-e7d80eedfda6","Type":"ContainerStarted","Data":"ec7c44d5b75cf5838d5fca672aa8b5bb434eeaf9029470d11df64bf1d47834b2"} Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.858797 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" event={"ID":"d389ea48-dc8c-43da-9bbf-e7d80eedfda6","Type":"ContainerStarted","Data":"7169a794192402a76417f8f93751cd2efad04ffab5c85521d32a0452485435f2"} Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.860878 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.880406 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" podStartSLOduration=4.880388606 podStartE2EDuration="4.880388606s" podCreationTimestamp="2025-09-30 19:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:59:13.87184849 +0000 UTC m=+755.810307338" watchObservedRunningTime="2025-09-30 19:59:13.880388606 +0000 UTC m=+755.818847434" Sep 30 19:59:13 crc kubenswrapper[4603]: I0930 19:59:13.903133 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6696dc5b68-5zgqn"] Sep 30 19:59:13 crc kubenswrapper[4603]: W0930 19:59:13.912533 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod522eb868_b835_4cea_9feb_dd4416ace6cc.slice/crio-2fcbfdf8790675041883ce9b4a0e7551089a58b6995482d6f62afb6959882e2c WatchSource:0}: Error finding container 2fcbfdf8790675041883ce9b4a0e7551089a58b6995482d6f62afb6959882e2c: Status 404 returned error can't find the container with id 2fcbfdf8790675041883ce9b4a0e7551089a58b6995482d6f62afb6959882e2c Sep 30 19:59:14 crc kubenswrapper[4603]: I0930 19:59:14.144680 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6746fd7dd4-bsxxw" Sep 30 19:59:14 crc kubenswrapper[4603]: I0930 19:59:14.865089 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" event={"ID":"522eb868-b835-4cea-9feb-dd4416ace6cc","Type":"ContainerStarted","Data":"34e15cb4254ace9a1528751f085ca1513b648074318f64ee586ce1428027cfb2"} Sep 30 19:59:14 crc kubenswrapper[4603]: I0930 19:59:14.865134 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" event={"ID":"522eb868-b835-4cea-9feb-dd4416ace6cc","Type":"ContainerStarted","Data":"2fcbfdf8790675041883ce9b4a0e7551089a58b6995482d6f62afb6959882e2c"} Sep 30 19:59:14 crc kubenswrapper[4603]: I0930 19:59:14.889200 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" podStartSLOduration=5.889148963 podStartE2EDuration="5.889148963s" 
podCreationTimestamp="2025-09-30 19:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 19:59:14.884737116 +0000 UTC m=+756.823195954" watchObservedRunningTime="2025-09-30 19:59:14.889148963 +0000 UTC m=+756.827607791" Sep 30 19:59:15 crc kubenswrapper[4603]: I0930 19:59:15.871365 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:15 crc kubenswrapper[4603]: I0930 19:59:15.876845 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6696dc5b68-5zgqn" Sep 30 19:59:16 crc kubenswrapper[4603]: I0930 19:59:16.868156 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-lcqr7" Sep 30 19:59:17 crc kubenswrapper[4603]: I0930 19:59:17.837665 4603 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.050151 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh"] Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.052277 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.057972 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.074420 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh"] Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.233382 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.233465 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.233500 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m7jb\" (UniqueName: \"kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.335103 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.335269 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.335326 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m7jb\" (UniqueName: \"kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.336728 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.344307 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.373226 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m7jb\" (UniqueName: \"kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.376421 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.861772 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh"] Sep 30 19:59:31 crc kubenswrapper[4603]: W0930 19:59:31.863379 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28552f7e_e802_46cc_8250_2a91a3b81f4c.slice/crio-c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543 WatchSource:0}: Error finding container c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543: Status 404 returned error can't find the container with id c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543 Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.928321 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-7gjfv" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" containerID="cri-o://0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7" gracePeriod=15 Sep 30 19:59:31 crc kubenswrapper[4603]: I0930 19:59:31.969599 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" event={"ID":"28552f7e-e802-46cc-8250-2a91a3b81f4c","Type":"ContainerStarted","Data":"c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543"} Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.077968 4603 patch_prober.go:28] interesting pod/console-f9d7485db-7gjfv container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.078061 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-7gjfv" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.468976 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7gjfv_bd8dd34e-aa2d-4388-9d52-299033710686/console/0.log" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.469070 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561293 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561380 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561410 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561431 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561461 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt9cr\" (UniqueName: \"kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561482 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.561524 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert\") pod \"bd8dd34e-aa2d-4388-9d52-299033710686\" (UID: \"bd8dd34e-aa2d-4388-9d52-299033710686\") " Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562289 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca" (OuterVolumeSpecName: "service-ca") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562298 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562404 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562550 4603 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562555 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config" (OuterVolumeSpecName: "console-config") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562566 4603 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.562579 4603 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.566578 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr" (OuterVolumeSpecName: "kube-api-access-lt9cr") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "kube-api-access-lt9cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.566696 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.567300 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "bd8dd34e-aa2d-4388-9d52-299033710686" (UID: "bd8dd34e-aa2d-4388-9d52-299033710686"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.663918 4603 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.663983 4603 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bd8dd34e-aa2d-4388-9d52-299033710686-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.664010 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt9cr\" (UniqueName: \"kubernetes.io/projected/bd8dd34e-aa2d-4388-9d52-299033710686-kube-api-access-lt9cr\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.664035 4603 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8dd34e-aa2d-4388-9d52-299033710686-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.977522 4603 generic.go:334] "Generic (PLEG): container finished" podID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerID="5ce55142ba603de5c9369947143a3f96150b8e9aef022bcb3076074e342808eb" exitCode=0 Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.977852 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" event={"ID":"28552f7e-e802-46cc-8250-2a91a3b81f4c","Type":"ContainerDied","Data":"5ce55142ba603de5c9369947143a3f96150b8e9aef022bcb3076074e342808eb"} Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980330 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-7gjfv_bd8dd34e-aa2d-4388-9d52-299033710686/console/0.log" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980365 4603 generic.go:334] "Generic (PLEG): container finished" podID="bd8dd34e-aa2d-4388-9d52-299033710686" containerID="0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7" exitCode=2 Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980385 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7gjfv" event={"ID":"bd8dd34e-aa2d-4388-9d52-299033710686","Type":"ContainerDied","Data":"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7"} Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980414 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7gjfv" event={"ID":"bd8dd34e-aa2d-4388-9d52-299033710686","Type":"ContainerDied","Data":"9fbcb7b43ad6b48ffb14479c2bb8e5bf310b9a3c29eb49d7210f2ef5a26102dd"} Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980433 4603 scope.go:117] "RemoveContainer" containerID="0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7" Sep 30 19:59:32 crc kubenswrapper[4603]: I0930 19:59:32.980515 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-7gjfv" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.024154 4603 scope.go:117] "RemoveContainer" containerID="0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.024610 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"] Sep 30 19:59:33 crc kubenswrapper[4603]: E0930 19:59:33.024842 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7\": container with ID starting with 0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7 not found: ID does not exist" containerID="0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.024882 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7"} err="failed to get container status \"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7\": rpc error: code = NotFound desc = could not find container \"0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7\": container with ID starting with 0d4f20db798e3269a1b8d0c55e35327f70187ac9197a23ab510e019780271ce7 not found: ID does not exist" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.029410 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-7gjfv"] Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.397839 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:33 crc kubenswrapper[4603]: E0930 19:59:33.398222 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.398244 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.398487 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" containerName="console" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.399841 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.417023 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.473887 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.473940 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.473963 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznkn\" (UniqueName: \"kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.575128 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.575208 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bznkn\" (UniqueName: \"kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.575622 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.575672 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.575978 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.594077 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bznkn\" (UniqueName: \"kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn\") pod \"redhat-operators-7jwv7\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:33 crc kubenswrapper[4603]: I0930 19:59:33.769376 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:34 crc kubenswrapper[4603]: I0930 19:59:34.197872 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:34 crc kubenswrapper[4603]: W0930 19:59:34.201997 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7822c70_822e_4a51_8799_429be157a135.slice/crio-a97abba40680899dd782f697e834c71227f019059a04199794d08c81db77f8b2 WatchSource:0}: Error finding container a97abba40680899dd782f697e834c71227f019059a04199794d08c81db77f8b2: Status 404 returned error can't find the container with id a97abba40680899dd782f697e834c71227f019059a04199794d08c81db77f8b2 Sep 30 19:59:34 crc kubenswrapper[4603]: I0930 19:59:34.774347 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd8dd34e-aa2d-4388-9d52-299033710686" path="/var/lib/kubelet/pods/bd8dd34e-aa2d-4388-9d52-299033710686/volumes" Sep 30 19:59:34 crc kubenswrapper[4603]: I0930 19:59:34.997798 4603 generic.go:334] "Generic (PLEG): container finished" podID="c7822c70-822e-4a51-8799-429be157a135" containerID="d5362cb66590d7823af07fc1e49d38dd1e2115f3ec0a14b65fc5cc51f7809a67" exitCode=0 Sep 30 19:59:34 crc kubenswrapper[4603]: I0930 19:59:34.997884 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerDied","Data":"d5362cb66590d7823af07fc1e49d38dd1e2115f3ec0a14b65fc5cc51f7809a67"} Sep 30 19:59:34 crc kubenswrapper[4603]: I0930 19:59:34.997962 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerStarted","Data":"a97abba40680899dd782f697e834c71227f019059a04199794d08c81db77f8b2"} Sep 30 19:59:36 crc kubenswrapper[4603]: I0930 19:59:36.011455 4603 generic.go:334] "Generic (PLEG): container finished" podID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerID="92bf217cc447327b8dc80b05276aaa6a2ee8416c2196ee7d07f8698736429590" exitCode=0 Sep 30 19:59:36 crc kubenswrapper[4603]: I0930 19:59:36.011576 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" event={"ID":"28552f7e-e802-46cc-8250-2a91a3b81f4c","Type":"ContainerDied","Data":"92bf217cc447327b8dc80b05276aaa6a2ee8416c2196ee7d07f8698736429590"} Sep 30 19:59:37 crc kubenswrapper[4603]: I0930 19:59:37.023050 4603 generic.go:334] "Generic (PLEG): container finished" podID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerID="8f064f832596623c1036b2de76009115ea479cbc16900855b5cdefdb1a343dc8" exitCode=0 Sep 30 19:59:37 crc kubenswrapper[4603]: I0930 19:59:37.023243 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" event={"ID":"28552f7e-e802-46cc-8250-2a91a3b81f4c","Type":"ContainerDied","Data":"8f064f832596623c1036b2de76009115ea479cbc16900855b5cdefdb1a343dc8"} Sep 30 19:59:37 crc 
kubenswrapper[4603]: I0930 19:59:37.028542 4603 generic.go:334] "Generic (PLEG): container finished" podID="c7822c70-822e-4a51-8799-429be157a135" containerID="853ffe4f21f14ce5a5a49c1020ddc7dfb3ec3b6a0df5aa668f7af7ddb1169793" exitCode=0 Sep 30 19:59:37 crc kubenswrapper[4603]: I0930 19:59:37.028613 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerDied","Data":"853ffe4f21f14ce5a5a49c1020ddc7dfb3ec3b6a0df5aa668f7af7ddb1169793"} Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.370364 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.535288 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util\") pod \"28552f7e-e802-46cc-8250-2a91a3b81f4c\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.535363 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle\") pod \"28552f7e-e802-46cc-8250-2a91a3b81f4c\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.535436 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m7jb\" (UniqueName: \"kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb\") pod \"28552f7e-e802-46cc-8250-2a91a3b81f4c\" (UID: \"28552f7e-e802-46cc-8250-2a91a3b81f4c\") " Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.536424 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle" (OuterVolumeSpecName: "bundle") pod "28552f7e-e802-46cc-8250-2a91a3b81f4c" (UID: "28552f7e-e802-46cc-8250-2a91a3b81f4c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.540580 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb" (OuterVolumeSpecName: "kube-api-access-6m7jb") pod "28552f7e-e802-46cc-8250-2a91a3b81f4c" (UID: "28552f7e-e802-46cc-8250-2a91a3b81f4c"). InnerVolumeSpecName "kube-api-access-6m7jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.548344 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util" (OuterVolumeSpecName: "util") pod "28552f7e-e802-46cc-8250-2a91a3b81f4c" (UID: "28552f7e-e802-46cc-8250-2a91a3b81f4c"). InnerVolumeSpecName "util". 
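
Each volume above moves through the same three reconciler steps, in order: "operationExecutor.UnmountVolume started" (reconciler_common.go:159), "UnmountVolume.TearDown succeeded" (operation_generator.go:803), and finally "Volume detached" (reconciler_common.go:293); the util, bundle, and kube-api-access-6m7jb volumes of the bundle-extraction pod all complete the sequence within the same second. When a teardown stalls, pairing the first and last step per volume narrows down the stuck one. A rough scanner over a saved copy of this log — the quoted message substrings come from the entries above, everything else is illustrative:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    	"strings"
    )

    // volName pulls the volume name out of lines like:
    //   ... UnmountVolume started for volume \"util\" ...
    //   ... Volume detached for volume \"util\" ...
    var volName = regexp.MustCompile(`for volume \\?"([^"\\]+)\\?"`)

    func main() {
    	started := map[string]bool{}
    	detached := map[string]bool{}
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // entries in this log are long
    	for sc.Scan() {
    		line := sc.Text()
    		m := volName.FindStringSubmatch(line)
    		if m == nil {
    			continue
    		}
    		switch {
    		case strings.Contains(line, "UnmountVolume started"):
    			started[m[1]] = true
    		case strings.Contains(line, "Volume detached"):
    			detached[m[1]] = true
    		}
    	}
    	for v := range started {
    		if !detached[v] {
    			fmt.Println("unmount started but never detached:", v)
    		}
    	}
    }

Run as "go run scan.go < kubelet.log"; for the entries in this section it prints nothing, since every started unmount reaches the detached step.
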
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.636871 4603 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-util\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.636916 4603 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/28552f7e-e802-46cc-8250-2a91a3b81f4c-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:38 crc kubenswrapper[4603]: I0930 19:59:38.636928 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m7jb\" (UniqueName: \"kubernetes.io/projected/28552f7e-e802-46cc-8250-2a91a3b81f4c-kube-api-access-6m7jb\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:39 crc kubenswrapper[4603]: I0930 19:59:39.042438 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerStarted","Data":"179d94b5d7b86138ddb48ede9b5924fc9209292b2cbe3ae676487a03fd8f2a44"} Sep 30 19:59:39 crc kubenswrapper[4603]: I0930 19:59:39.044809 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" event={"ID":"28552f7e-e802-46cc-8250-2a91a3b81f4c","Type":"ContainerDied","Data":"c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543"} Sep 30 19:59:39 crc kubenswrapper[4603]: I0930 19:59:39.044840 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9938e93707a12f1da296a5a9aac6a79dcc91d58691d45ec6eabf090a5556543" Sep 30 19:59:39 crc kubenswrapper[4603]: I0930 19:59:39.044874 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh" Sep 30 19:59:39 crc kubenswrapper[4603]: I0930 19:59:39.066541 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7jwv7" podStartSLOduration=3.340229391 podStartE2EDuration="6.066525658s" podCreationTimestamp="2025-09-30 19:59:33 +0000 UTC" firstStartedPulling="2025-09-30 19:59:35.000939481 +0000 UTC m=+776.939398309" lastFinishedPulling="2025-09-30 19:59:37.727235758 +0000 UTC m=+779.665694576" observedRunningTime="2025-09-30 19:59:39.062330181 +0000 UTC m=+781.000788999" watchObservedRunningTime="2025-09-30 19:59:39.066525658 +0000 UTC m=+781.004984476" Sep 30 19:59:43 crc kubenswrapper[4603]: I0930 19:59:43.770332 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:43 crc kubenswrapper[4603]: I0930 19:59:43.770692 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:43 crc kubenswrapper[4603]: I0930 19:59:43.833020 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:44 crc kubenswrapper[4603]: I0930 19:59:44.170909 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:46 crc kubenswrapper[4603]: I0930 19:59:46.177760 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:46 crc kubenswrapper[4603]: I0930 19:59:46.178184 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7jwv7" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="registry-server" containerID="cri-o://179d94b5d7b86138ddb48ede9b5924fc9209292b2cbe3ae676487a03fd8f2a44" gracePeriod=2 Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.088363 4603 generic.go:334] "Generic (PLEG): container finished" podID="c7822c70-822e-4a51-8799-429be157a135" containerID="179d94b5d7b86138ddb48ede9b5924fc9209292b2cbe3ae676487a03fd8f2a44" exitCode=0 Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.088439 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerDied","Data":"179d94b5d7b86138ddb48ede9b5924fc9209292b2cbe3ae676487a03fd8f2a44"} Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.199372 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.346725 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities\") pod \"c7822c70-822e-4a51-8799-429be157a135\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.346781 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bznkn\" (UniqueName: \"kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn\") pod \"c7822c70-822e-4a51-8799-429be157a135\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.346836 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content\") pod \"c7822c70-822e-4a51-8799-429be157a135\" (UID: \"c7822c70-822e-4a51-8799-429be157a135\") " Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.347696 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities" (OuterVolumeSpecName: "utilities") pod "c7822c70-822e-4a51-8799-429be157a135" (UID: "c7822c70-822e-4a51-8799-429be157a135"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.353310 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn" (OuterVolumeSpecName: "kube-api-access-bznkn") pod "c7822c70-822e-4a51-8799-429be157a135" (UID: "c7822c70-822e-4a51-8799-429be157a135"). InnerVolumeSpecName "kube-api-access-bznkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.420303 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7822c70-822e-4a51-8799-429be157a135" (UID: "c7822c70-822e-4a51-8799-429be157a135"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.449067 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.449105 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bznkn\" (UniqueName: \"kubernetes.io/projected/c7822c70-822e-4a51-8799-429be157a135-kube-api-access-bznkn\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.449119 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7822c70-822e-4a51-8799-429be157a135-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.662351 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm"] Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.662805 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="util" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.662893 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="util" Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.662966 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="pull" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663023 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="pull" Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.663078 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="extract" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663149 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" containerName="extract" Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.663224 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="registry-server" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663305 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="registry-server" Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.663378 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="extract-utilities" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663442 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="extract-utilities" Sep 30 19:59:47 crc kubenswrapper[4603]: E0930 19:59:47.663510 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="extract-content" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663567 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="extract-content" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663776 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="28552f7e-e802-46cc-8250-2a91a3b81f4c" 
containerName="extract" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.663878 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7822c70-822e-4a51-8799-429be157a135" containerName="registry-server" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.664329 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.667097 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-c97bz" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.667304 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.667404 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.667572 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.667572 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.681562 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm"] Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.751396 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-apiservice-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.751440 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-webhook-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.751464 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh9tc\" (UniqueName: \"kubernetes.io/projected/eaf8636d-76cf-40c3-9e77-1b898b6e00be-kube-api-access-qh9tc\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.852476 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-apiservice-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.852516 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-webhook-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.852542 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh9tc\" (UniqueName: \"kubernetes.io/projected/eaf8636d-76cf-40c3-9e77-1b898b6e00be-kube-api-access-qh9tc\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.857854 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-apiservice-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.862786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaf8636d-76cf-40c3-9e77-1b898b6e00be-webhook-cert\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.869120 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh9tc\" (UniqueName: \"kubernetes.io/projected/eaf8636d-76cf-40c3-9e77-1b898b6e00be-kube-api-access-qh9tc\") pod \"metallb-operator-controller-manager-7b54d77bb4-zvnfm\" (UID: \"eaf8636d-76cf-40c3-9e77-1b898b6e00be\") " pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.961140 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn"] Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.962515 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.970974 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.970976 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.971705 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-hbmpb" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.978236 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:47 crc kubenswrapper[4603]: I0930 19:59:47.985306 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn"] Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.099105 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jwv7" event={"ID":"c7822c70-822e-4a51-8799-429be157a135","Type":"ContainerDied","Data":"a97abba40680899dd782f697e834c71227f019059a04199794d08c81db77f8b2"} Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.099152 4603 scope.go:117] "RemoveContainer" containerID="179d94b5d7b86138ddb48ede9b5924fc9209292b2cbe3ae676487a03fd8f2a44" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.099303 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7jwv7" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.130202 4603 scope.go:117] "RemoveContainer" containerID="853ffe4f21f14ce5a5a49c1020ddc7dfb3ec3b6a0df5aa668f7af7ddb1169793" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.133702 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.137523 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7jwv7"] Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.155254 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-apiservice-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.155349 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-webhook-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.155378 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5ps8\" (UniqueName: \"kubernetes.io/projected/3aad58f2-49aa-472d-a347-92a699c7c78a-kube-api-access-m5ps8\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.158791 4603 scope.go:117] "RemoveContainer" containerID="d5362cb66590d7823af07fc1e49d38dd1e2115f3ec0a14b65fc5cc51f7809a67" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.257204 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-webhook-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.257246 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5ps8\" (UniqueName: \"kubernetes.io/projected/3aad58f2-49aa-472d-a347-92a699c7c78a-kube-api-access-m5ps8\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.257271 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-apiservice-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.264493 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-apiservice-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.265710 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3aad58f2-49aa-472d-a347-92a699c7c78a-webhook-cert\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.284826 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5ps8\" (UniqueName: \"kubernetes.io/projected/3aad58f2-49aa-472d-a347-92a699c7c78a-kube-api-access-m5ps8\") pod \"metallb-operator-webhook-server-696dffd44d-bfmjn\" (UID: \"3aad58f2-49aa-472d-a347-92a699c7c78a\") " pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.425054 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm"] Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.575247 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:48 crc kubenswrapper[4603]: I0930 19:59:48.792497 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7822c70-822e-4a51-8799-429be157a135" path="/var/lib/kubelet/pods/c7822c70-822e-4a51-8799-429be157a135/volumes" Sep 30 19:59:49 crc kubenswrapper[4603]: I0930 19:59:49.104462 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" event={"ID":"eaf8636d-76cf-40c3-9e77-1b898b6e00be","Type":"ContainerStarted","Data":"76c386fa5dca35311396db52fc3f105428ceb66030089d5245105d6c3444387d"} Sep 30 19:59:49 crc kubenswrapper[4603]: I0930 19:59:49.104752 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn"] Sep 30 19:59:49 crc kubenswrapper[4603]: W0930 19:59:49.127479 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3aad58f2_49aa_472d_a347_92a699c7c78a.slice/crio-d829fe29c8218b10126502abf7a657521521505b268af5f8eb6eb93fa02fb60a WatchSource:0}: Error finding container d829fe29c8218b10126502abf7a657521521505b268af5f8eb6eb93fa02fb60a: Status 404 returned error can't find the container with id d829fe29c8218b10126502abf7a657521521505b268af5f8eb6eb93fa02fb60a Sep 30 19:59:50 crc kubenswrapper[4603]: I0930 19:59:50.119463 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" event={"ID":"3aad58f2-49aa-472d-a347-92a699c7c78a","Type":"ContainerStarted","Data":"d829fe29c8218b10126502abf7a657521521505b268af5f8eb6eb93fa02fb60a"} Sep 30 19:59:53 crc kubenswrapper[4603]: I0930 19:59:53.136154 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" event={"ID":"eaf8636d-76cf-40c3-9e77-1b898b6e00be","Type":"ContainerStarted","Data":"d18378815a926d823b833abdbe8e76391847aec3960a76d931fec9cde683df3f"} Sep 30 19:59:53 crc kubenswrapper[4603]: I0930 19:59:53.136776 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 19:59:53 crc kubenswrapper[4603]: I0930 19:59:53.155082 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" podStartSLOduration=2.261312659 podStartE2EDuration="6.155067151s" podCreationTimestamp="2025-09-30 19:59:47 +0000 UTC" firstStartedPulling="2025-09-30 19:59:48.434274651 +0000 UTC m=+790.372733469" lastFinishedPulling="2025-09-30 19:59:52.328029143 +0000 UTC m=+794.266487961" observedRunningTime="2025-09-30 19:59:53.15328102 +0000 UTC m=+795.091739838" watchObservedRunningTime="2025-09-30 19:59:53.155067151 +0000 UTC m=+795.093525969" Sep 30 19:59:55 crc kubenswrapper[4603]: I0930 19:59:55.149639 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" event={"ID":"3aad58f2-49aa-472d-a347-92a699c7c78a","Type":"ContainerStarted","Data":"6b1eb07fe711a2a812a350b5eca0b84516e583b85584f431d44115b622c0228f"} Sep 30 19:59:55 crc kubenswrapper[4603]: I0930 19:59:55.150769 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 19:59:55 crc kubenswrapper[4603]: I0930 
19:59:55.177717 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" podStartSLOduration=3.0481222 podStartE2EDuration="8.17769654s" podCreationTimestamp="2025-09-30 19:59:47 +0000 UTC" firstStartedPulling="2025-09-30 19:59:49.138108413 +0000 UTC m=+791.076567231" lastFinishedPulling="2025-09-30 19:59:54.267682753 +0000 UTC m=+796.206141571" observedRunningTime="2025-09-30 19:59:55.168722378 +0000 UTC m=+797.107181216" watchObservedRunningTime="2025-09-30 19:59:55.17769654 +0000 UTC m=+797.116155368" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.149772 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c"] Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.150980 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.152667 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.154372 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.162127 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c"] Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.331946 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.332048 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.332076 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nccb\" (UniqueName: \"kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.433006 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.433144 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.433193 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nccb\" (UniqueName: \"kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.433885 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.440886 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.459838 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nccb\" (UniqueName: \"kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb\") pod \"collect-profiles-29321040-5m55c\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.473606 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:00 crc kubenswrapper[4603]: I0930 20:00:00.912457 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c"] Sep 30 20:00:01 crc kubenswrapper[4603]: I0930 20:00:01.200297 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" event={"ID":"e17759eb-d1ee-4cc6-b354-f3914a671cb0","Type":"ContainerStarted","Data":"15d86a4e0f5fc2dbf4ce548a4a2076499ee886d535d49fb37a71089c0a0b93d2"} Sep 30 20:00:02 crc kubenswrapper[4603]: I0930 20:00:02.206776 4603 generic.go:334] "Generic (PLEG): container finished" podID="e17759eb-d1ee-4cc6-b354-f3914a671cb0" containerID="94d7a656def7f25ec824e3e6ef803c5741c55eb2e4bc3bdcdeb0423c7fa77d0d" exitCode=0 Sep 30 20:00:02 crc kubenswrapper[4603]: I0930 20:00:02.206825 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" event={"ID":"e17759eb-d1ee-4cc6-b354-f3914a671cb0","Type":"ContainerDied","Data":"94d7a656def7f25ec824e3e6ef803c5741c55eb2e4bc3bdcdeb0423c7fa77d0d"} Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.501581 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.634035 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume\") pod \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.634133 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume\") pod \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.634155 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nccb\" (UniqueName: \"kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb\") pod \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\" (UID: \"e17759eb-d1ee-4cc6-b354-f3914a671cb0\") " Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.635406 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume" (OuterVolumeSpecName: "config-volume") pod "e17759eb-d1ee-4cc6-b354-f3914a671cb0" (UID: "e17759eb-d1ee-4cc6-b354-f3914a671cb0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.642881 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e17759eb-d1ee-4cc6-b354-f3914a671cb0" (UID: "e17759eb-d1ee-4cc6-b354-f3914a671cb0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.649293 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb" (OuterVolumeSpecName: "kube-api-access-9nccb") pod "e17759eb-d1ee-4cc6-b354-f3914a671cb0" (UID: "e17759eb-d1ee-4cc6-b354-f3914a671cb0"). InnerVolumeSpecName "kube-api-access-9nccb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.735690 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e17759eb-d1ee-4cc6-b354-f3914a671cb0-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.735728 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nccb\" (UniqueName: \"kubernetes.io/projected/e17759eb-d1ee-4cc6-b354-f3914a671cb0-kube-api-access-9nccb\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:03 crc kubenswrapper[4603]: I0930 20:00:03.735741 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e17759eb-d1ee-4cc6-b354-f3914a671cb0-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:04 crc kubenswrapper[4603]: I0930 20:00:04.225870 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" event={"ID":"e17759eb-d1ee-4cc6-b354-f3914a671cb0","Type":"ContainerDied","Data":"15d86a4e0f5fc2dbf4ce548a4a2076499ee886d535d49fb37a71089c0a0b93d2"} Sep 30 20:00:04 crc kubenswrapper[4603]: I0930 20:00:04.225920 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c" Sep 30 20:00:04 crc kubenswrapper[4603]: I0930 20:00:04.225936 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15d86a4e0f5fc2dbf4ce548a4a2076499ee886d535d49fb37a71089c0a0b93d2" Sep 30 20:00:08 crc kubenswrapper[4603]: I0930 20:00:08.580136 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-696dffd44d-bfmjn" Sep 30 20:00:17 crc kubenswrapper[4603]: I0930 20:00:17.911915 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:17 crc kubenswrapper[4603]: E0930 20:00:17.913080 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17759eb-d1ee-4cc6-b354-f3914a671cb0" containerName="collect-profiles" Sep 30 20:00:17 crc kubenswrapper[4603]: I0930 20:00:17.913114 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17759eb-d1ee-4cc6-b354-f3914a671cb0" containerName="collect-profiles" Sep 30 20:00:17 crc kubenswrapper[4603]: I0930 20:00:17.913414 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17759eb-d1ee-4cc6-b354-f3914a671cb0" containerName="collect-profiles" Sep 30 20:00:17 crc kubenswrapper[4603]: I0930 20:00:17.919086 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:17 crc kubenswrapper[4603]: I0930 20:00:17.952371 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.039135 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxpf4\" (UniqueName: \"kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.039245 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.039319 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.140760 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.140834 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.140902 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxpf4\" (UniqueName: \"kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.141349 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.141348 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.173217 4603 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nxpf4\" (UniqueName: \"kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4\") pod \"community-operators-mnzds\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.254020 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:18 crc kubenswrapper[4603]: I0930 20:00:18.717772 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:19 crc kubenswrapper[4603]: E0930 20:00:19.029743 4603 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6ac0c67_4adb_4ab3_b5ee_e08a1e14df17.slice/crio-59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6ac0c67_4adb_4ab3_b5ee_e08a1e14df17.slice/crio-conmon-59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:00:19 crc kubenswrapper[4603]: I0930 20:00:19.324627 4603 generic.go:334] "Generic (PLEG): container finished" podID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerID="59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89" exitCode=0 Sep 30 20:00:19 crc kubenswrapper[4603]: I0930 20:00:19.324699 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerDied","Data":"59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89"} Sep 30 20:00:19 crc kubenswrapper[4603]: I0930 20:00:19.324737 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerStarted","Data":"8fdfdf7b68440f9345de6e009ea1f12c5e936d2d32a33bb17f735e275299ce02"} Sep 30 20:00:21 crc kubenswrapper[4603]: I0930 20:00:21.339714 4603 generic.go:334] "Generic (PLEG): container finished" podID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerID="de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925" exitCode=0 Sep 30 20:00:21 crc kubenswrapper[4603]: I0930 20:00:21.339833 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerDied","Data":"de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925"} Sep 30 20:00:22 crc kubenswrapper[4603]: I0930 20:00:22.351428 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerStarted","Data":"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb"} Sep 30 20:00:22 crc kubenswrapper[4603]: I0930 20:00:22.382802 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mnzds" podStartSLOduration=2.7771746 podStartE2EDuration="5.382784091s" podCreationTimestamp="2025-09-30 20:00:17 +0000 UTC" firstStartedPulling="2025-09-30 20:00:19.329005508 +0000 UTC m=+821.267464336" lastFinishedPulling="2025-09-30 
20:00:21.934615009 +0000 UTC m=+823.873073827" observedRunningTime="2025-09-30 20:00:22.381132965 +0000 UTC m=+824.319591803" watchObservedRunningTime="2025-09-30 20:00:22.382784091 +0000 UTC m=+824.321242919" Sep 30 20:00:27 crc kubenswrapper[4603]: I0930 20:00:27.980766 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7b54d77bb4-zvnfm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.254751 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.254818 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.366964 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.419954 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.603587 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.705095 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.706012 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.708565 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4s59z"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.709613 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.722935 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.732778 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.733046 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-r6cnn" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.733221 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.747706 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.797898 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-87tdm"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.798781 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-87tdm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.807553 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4f5xd" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.807837 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.807922 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.808098 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.814705 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-fg62p"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.815641 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.817240 4603 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.831179 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-fg62p"] Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889436 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889488 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gdzg\" (UniqueName: \"kubernetes.io/projected/6d18b2f5-97ae-442e-af33-cc7f501a33fe-kube-api-access-6gdzg\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889527 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xpkf\" (UniqueName: \"kubernetes.io/projected/a308f483-402e-4254-a1c8-440883cde4b9-kube-api-access-8xpkf\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889544 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-startup\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889571 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-conf\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889590 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-reloader\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889608 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889627 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-sockets\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.889828 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991384 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991447 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metallb-excludel2\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991471 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-reloader\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991519 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991540 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991577 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-sockets\") pod \"frr-k8s-4s59z\" (UID: 
\"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991604 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4fzz\" (UniqueName: \"kubernetes.io/projected/4c195024-2cbe-4d5a-93f9-9cf1d5380440-kube-api-access-g4fzz\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991675 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991764 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991823 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k782k\" (UniqueName: \"kubernetes.io/projected/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-kube-api-access-k782k\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991850 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gdzg\" (UniqueName: \"kubernetes.io/projected/6d18b2f5-97ae-442e-af33-cc7f501a33fe-kube-api-access-6gdzg\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991900 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-cert\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.991844 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-reloader\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: E0930 20:00:28.991938 4603 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Sep 30 20:00:28 crc kubenswrapper[4603]: E0930 20:00:28.992036 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert podName:a308f483-402e-4254-a1c8-440883cde4b9 nodeName:}" failed. No retries permitted until 2025-09-30 20:00:29.492013157 +0000 UTC m=+831.430472025 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert") pod "frr-k8s-webhook-server-5478bdb765-hv78l" (UID: "a308f483-402e-4254-a1c8-440883cde4b9") : secret "frr-k8s-webhook-server-cert" not found Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992039 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-sockets\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992309 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xpkf\" (UniqueName: \"kubernetes.io/projected/a308f483-402e-4254-a1c8-440883cde4b9-kube-api-access-8xpkf\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992341 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-startup\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: E0930 20:00:28.992412 4603 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992437 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-metrics-certs\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:28 crc kubenswrapper[4603]: E0930 20:00:28.992477 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs podName:6d18b2f5-97ae-442e-af33-cc7f501a33fe nodeName:}" failed. No retries permitted until 2025-09-30 20:00:29.492461539 +0000 UTC m=+831.430920447 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs") pod "frr-k8s-4s59z" (UID: "6d18b2f5-97ae-442e-af33-cc7f501a33fe") : secret "frr-k8s-certs-secret" not found Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992524 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-conf\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.992827 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-conf\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:28 crc kubenswrapper[4603]: I0930 20:00:28.993393 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6d18b2f5-97ae-442e-af33-cc7f501a33fe-frr-startup\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.016131 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gdzg\" (UniqueName: \"kubernetes.io/projected/6d18b2f5-97ae-442e-af33-cc7f501a33fe-kube-api-access-6gdzg\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.019280 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xpkf\" (UniqueName: \"kubernetes.io/projected/a308f483-402e-4254-a1c8-440883cde4b9-kube-api-access-8xpkf\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093454 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093509 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metallb-excludel2\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093532 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093575 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4fzz\" (UniqueName: \"kubernetes.io/projected/4c195024-2cbe-4d5a-93f9-9cf1d5380440-kube-api-access-g4fzz\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " 
pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093621 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k782k\" (UniqueName: \"kubernetes.io/projected/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-kube-api-access-k782k\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093641 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-cert\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.093668 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-metrics-certs\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.094105 4603 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.094121 4603 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.094204 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist podName:ead35fe0-3ed6-4cb2-943c-1f3609f978d3 nodeName:}" failed. No retries permitted until 2025-09-30 20:00:29.594184834 +0000 UTC m=+831.532643652 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist") pod "speaker-87tdm" (UID: "ead35fe0-3ed6-4cb2-943c-1f3609f978d3") : secret "metallb-memberlist" not found Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.094221 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs podName:ead35fe0-3ed6-4cb2-943c-1f3609f978d3 nodeName:}" failed. No retries permitted until 2025-09-30 20:00:29.594214865 +0000 UTC m=+831.532673683 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs") pod "speaker-87tdm" (UID: "ead35fe0-3ed6-4cb2-943c-1f3609f978d3") : secret "speaker-certs-secret" not found Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.094806 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metallb-excludel2\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.110717 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-cert\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.115282 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k782k\" (UniqueName: \"kubernetes.io/projected/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-kube-api-access-k782k\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.116798 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c195024-2cbe-4d5a-93f9-9cf1d5380440-metrics-certs\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.123845 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4fzz\" (UniqueName: \"kubernetes.io/projected/4c195024-2cbe-4d5a-93f9-9cf1d5380440-kube-api-access-g4fzz\") pod \"controller-5d688f5ffc-fg62p\" (UID: \"4c195024-2cbe-4d5a-93f9-9cf1d5380440\") " pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.132012 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.499409 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.499926 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.503451 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6d18b2f5-97ae-442e-af33-cc7f501a33fe-metrics-certs\") pod \"frr-k8s-4s59z\" (UID: \"6d18b2f5-97ae-442e-af33-cc7f501a33fe\") " pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.505800 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a308f483-402e-4254-a1c8-440883cde4b9-cert\") pod \"frr-k8s-webhook-server-5478bdb765-hv78l\" (UID: \"a308f483-402e-4254-a1c8-440883cde4b9\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.558055 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-fg62p"] Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.600609 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.601360 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.601767 4603 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 20:00:29 crc kubenswrapper[4603]: E0930 20:00:29.601870 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist podName:ead35fe0-3ed6-4cb2-943c-1f3609f978d3 nodeName:}" failed. No retries permitted until 2025-09-30 20:00:30.601839709 +0000 UTC m=+832.540298557 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist") pod "speaker-87tdm" (UID: "ead35fe0-3ed6-4cb2-943c-1f3609f978d3") : secret "metallb-memberlist" not found Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.605026 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-metrics-certs\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.633453 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:29 crc kubenswrapper[4603]: I0930 20:00:29.640974 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.075927 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l"] Sep 30 20:00:30 crc kubenswrapper[4603]: W0930 20:00:30.097667 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda308f483_402e_4254_a1c8_440883cde4b9.slice/crio-343d36880bdb4ed742052cd9056ab638c3c9ff2df3ac7cad7f86b809129bdd26 WatchSource:0}: Error finding container 343d36880bdb4ed742052cd9056ab638c3c9ff2df3ac7cad7f86b809129bdd26: Status 404 returned error can't find the container with id 343d36880bdb4ed742052cd9056ab638c3c9ff2df3ac7cad7f86b809129bdd26 Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.401813 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"f3d4136f9a250f2bd095a318aa545469f62f45370350f2f10a4e48725e70362e"} Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.404031 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-fg62p" event={"ID":"4c195024-2cbe-4d5a-93f9-9cf1d5380440","Type":"ContainerStarted","Data":"5e12227679c582b0c3b8603a2eb438a2059b3e293b47ea87219a6fc956853e19"} Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.404105 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-fg62p" event={"ID":"4c195024-2cbe-4d5a-93f9-9cf1d5380440","Type":"ContainerStarted","Data":"bf8452651f4d4f5c69a0d99871b91664051bd6715936b6c8f5bba478fb201d11"} Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.404131 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-fg62p" event={"ID":"4c195024-2cbe-4d5a-93f9-9cf1d5380440","Type":"ContainerStarted","Data":"5084a4eb90308b6985c27594afda649ac0930e9faf6c9cd878cc5a9a487eb7ab"} Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.404203 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.405512 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" event={"ID":"a308f483-402e-4254-a1c8-440883cde4b9","Type":"ContainerStarted","Data":"343d36880bdb4ed742052cd9056ab638c3c9ff2df3ac7cad7f86b809129bdd26"} Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.405788 4603 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mnzds" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="registry-server" containerID="cri-o://f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb" gracePeriod=2 Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.425702 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-fg62p" podStartSLOduration=2.425683387 podStartE2EDuration="2.425683387s" podCreationTimestamp="2025-09-30 20:00:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:00:30.421559211 +0000 UTC m=+832.360018029" watchObservedRunningTime="2025-09-30 20:00:30.425683387 +0000 UTC m=+832.364142205" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.613434 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.625645 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ead35fe0-3ed6-4cb2-943c-1f3609f978d3-memberlist\") pod \"speaker-87tdm\" (UID: \"ead35fe0-3ed6-4cb2-943c-1f3609f978d3\") " pod="metallb-system/speaker-87tdm" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.873941 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:30 crc kubenswrapper[4603]: I0930 20:00:30.919927 4603 util.go:30] "No sandbox for pod can be found. 
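
The "Killing container with a grace period" entry above tears down the registry-server with gracePeriod=2: the runtime delivers SIGTERM, waits up to two seconds for the process to exit, then escalates to SIGKILL. A stand-alone sketch of that escalation (a sleep process stands in for the container; unix-only because of the signal):

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to the grace period, then SIGKILLs --
// the same escalation the kubelet applies with gracePeriod=2 above.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		cmd.Process.Kill()
		<-done
		fmt.Println("grace period expired, killed")
	}
}

func main() {
	cmd := exec.Command("sleep", "30") // stand-in for the container process
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second)
}
```
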
Need to start a new one" pod="metallb-system/speaker-87tdm" Sep 30 20:00:30 crc kubenswrapper[4603]: W0930 20:00:30.936243 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podead35fe0_3ed6_4cb2_943c_1f3609f978d3.slice/crio-34036eb47c4e77a96540a44db0b6b528332da6ead4ec72043e4e733afa438151 WatchSource:0}: Error finding container 34036eb47c4e77a96540a44db0b6b528332da6ead4ec72043e4e733afa438151: Status 404 returned error can't find the container with id 34036eb47c4e77a96540a44db0b6b528332da6ead4ec72043e4e733afa438151 Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.024497 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities\") pod \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.024535 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxpf4\" (UniqueName: \"kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4\") pod \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.024563 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content\") pod \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\" (UID: \"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17\") " Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.025295 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities" (OuterVolumeSpecName: "utilities") pod "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" (UID: "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.028081 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4" (OuterVolumeSpecName: "kube-api-access-nxpf4") pod "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" (UID: "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17"). InnerVolumeSpecName "kube-api-access-nxpf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.124548 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" (UID: "a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.126904 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.126927 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxpf4\" (UniqueName: \"kubernetes.io/projected/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-kube-api-access-nxpf4\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.126937 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.414452 4603 generic.go:334] "Generic (PLEG): container finished" podID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerID="f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb" exitCode=0 Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.414596 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerDied","Data":"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb"} Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.414888 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mnzds" event={"ID":"a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17","Type":"ContainerDied","Data":"8fdfdf7b68440f9345de6e009ea1f12c5e936d2d32a33bb17f735e275299ce02"} Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.414910 4603 scope.go:117] "RemoveContainer" containerID="f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.414703 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mnzds" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.422687 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-87tdm" event={"ID":"ead35fe0-3ed6-4cb2-943c-1f3609f978d3","Type":"ContainerStarted","Data":"e4269f1f3ca6686d11cbec41f963d8d2f746a0e6a856940009e8b21e88e0a7a2"} Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.422722 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-87tdm" event={"ID":"ead35fe0-3ed6-4cb2-943c-1f3609f978d3","Type":"ContainerStarted","Data":"34036eb47c4e77a96540a44db0b6b528332da6ead4ec72043e4e733afa438151"} Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.442543 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.446700 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mnzds"] Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.454540 4603 scope.go:117] "RemoveContainer" containerID="de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.527258 4603 scope.go:117] "RemoveContainer" containerID="59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.566323 4603 scope.go:117] "RemoveContainer" containerID="f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb" Sep 30 20:00:31 crc kubenswrapper[4603]: E0930 20:00:31.569643 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb\": container with ID starting with f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb not found: ID does not exist" containerID="f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.569699 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb"} err="failed to get container status \"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb\": rpc error: code = NotFound desc = could not find container \"f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb\": container with ID starting with f9bccc5ad04cfcc78938dbca479c7b96d75383fe015d5012559957051ed210eb not found: ID does not exist" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.569730 4603 scope.go:117] "RemoveContainer" containerID="de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925" Sep 30 20:00:31 crc kubenswrapper[4603]: E0930 20:00:31.580345 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925\": container with ID starting with de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925 not found: ID does not exist" containerID="de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.580399 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925"} err="failed to get container status 
\"de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925\": rpc error: code = NotFound desc = could not find container \"de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925\": container with ID starting with de35800952428d3bc3ab8c20e18f6ee97c48d0838449ffbce1c7e08d3bbc7925 not found: ID does not exist" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.580430 4603 scope.go:117] "RemoveContainer" containerID="59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89" Sep 30 20:00:31 crc kubenswrapper[4603]: E0930 20:00:31.581430 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89\": container with ID starting with 59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89 not found: ID does not exist" containerID="59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89" Sep 30 20:00:31 crc kubenswrapper[4603]: I0930 20:00:31.581453 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89"} err="failed to get container status \"59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89\": rpc error: code = NotFound desc = could not find container \"59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89\": container with ID starting with 59a51b104466140f44bb35bdeb38d358bcdce81b47cbd662fb7996abde014f89 not found: ID does not exist" Sep 30 20:00:32 crc kubenswrapper[4603]: I0930 20:00:32.430684 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-87tdm" event={"ID":"ead35fe0-3ed6-4cb2-943c-1f3609f978d3","Type":"ContainerStarted","Data":"a88e7f1fa1d5c63093aa4e1b1bb241981cb20a6c729193b214120e84365320ff"} Sep 30 20:00:32 crc kubenswrapper[4603]: I0930 20:00:32.431192 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-87tdm" Sep 30 20:00:32 crc kubenswrapper[4603]: I0930 20:00:32.453196 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-87tdm" podStartSLOduration=4.453181652 podStartE2EDuration="4.453181652s" podCreationTimestamp="2025-09-30 20:00:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:00:32.452716059 +0000 UTC m=+834.391174877" watchObservedRunningTime="2025-09-30 20:00:32.453181652 +0000 UTC m=+834.391640470" Sep 30 20:00:32 crc kubenswrapper[4603]: I0930 20:00:32.773227 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" path="/var/lib/kubelet/pods/a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17/volumes" Sep 30 20:00:39 crc kubenswrapper[4603]: I0930 20:00:39.135987 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-fg62p" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.927218 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:00:40 crc kubenswrapper[4603]: E0930 20:00:40.927763 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="extract-utilities" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.927777 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="extract-utilities" Sep 30 20:00:40 crc kubenswrapper[4603]: E0930 20:00:40.927968 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="registry-server" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.927974 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="registry-server" Sep 30 20:00:40 crc kubenswrapper[4603]: E0930 20:00:40.927985 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="extract-content" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.927991 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="extract-content" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.928108 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6ac0c67-4adb-4ab3-b5ee-e08a1e14df17" containerName="registry-server" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.928953 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:40 crc kubenswrapper[4603]: I0930 20:00:40.938418 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.004070 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.004138 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.004211 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96ql5\" (UniqueName: \"kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.105431 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96ql5\" (UniqueName: \"kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.105533 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.105572 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.106116 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.106230 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.127324 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96ql5\" (UniqueName: \"kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5\") pod \"redhat-marketplace-xrzr7\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.297044 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.498130 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" event={"ID":"a308f483-402e-4254-a1c8-440883cde4b9","Type":"ContainerStarted","Data":"fa3dc1ff46dc9c645e792473e13795f2fad2581d7bba84638ccd4a835a790573"} Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.498459 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.500768 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerDied","Data":"b6c86483e2438265cc2659c3f1b1d454756c2b88a35d465317722d8dc8745c8c"} Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.501233 4603 generic.go:334] "Generic (PLEG): container finished" podID="6d18b2f5-97ae-442e-af33-cc7f501a33fe" containerID="b6c86483e2438265cc2659c3f1b1d454756c2b88a35d465317722d8dc8745c8c" exitCode=0 Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.548284 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" podStartSLOduration=2.9361760009999998 podStartE2EDuration="13.54826502s" podCreationTimestamp="2025-09-30 20:00:28 +0000 UTC" firstStartedPulling="2025-09-30 20:00:30.099520546 +0000 UTC m=+832.037979364" lastFinishedPulling="2025-09-30 20:00:40.711609555 +0000 UTC m=+842.650068383" observedRunningTime="2025-09-30 20:00:41.52105025 +0000 UTC m=+843.459509068" watchObservedRunningTime="2025-09-30 20:00:41.54826502 +0000 UTC m=+843.486723838" Sep 30 20:00:41 crc kubenswrapper[4603]: I0930 20:00:41.621899 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:00:42 crc kubenswrapper[4603]: I0930 20:00:42.516359 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerStarted","Data":"5326f9795c93cd6926a229cfd91a639bd47a1a72b6fe3b77e5a7e7a1fe654f2e"} Sep 30 20:00:43 crc kubenswrapper[4603]: I0930 20:00:43.526764 4603 generic.go:334] "Generic (PLEG): container finished" podID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerID="a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264" exitCode=0 Sep 30 20:00:43 crc kubenswrapper[4603]: I0930 20:00:43.526877 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerDied","Data":"a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264"} Sep 30 20:00:43 crc kubenswrapper[4603]: I0930 20:00:43.531382 4603 generic.go:334] "Generic (PLEG): container finished" podID="6d18b2f5-97ae-442e-af33-cc7f501a33fe" containerID="ae68baffd50e7109e6939f090b446db0f6fe5d34989c629c6d2f24013bd2a714" exitCode=0 Sep 30 20:00:43 crc kubenswrapper[4603]: I0930 20:00:43.531421 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerDied","Data":"ae68baffd50e7109e6939f090b446db0f6fe5d34989c629c6d2f24013bd2a714"} Sep 30 20:00:44 crc kubenswrapper[4603]: I0930 20:00:44.545348 4603 generic.go:334] "Generic (PLEG): container finished" podID="6d18b2f5-97ae-442e-af33-cc7f501a33fe" containerID="9f5b5878d4233a3ff5417a51afcce1f6ace94569c3e1566b932065a75e940a28" exitCode=0 Sep 30 20:00:44 crc kubenswrapper[4603]: I0930 20:00:44.545397 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerDied","Data":"9f5b5878d4233a3ff5417a51afcce1f6ace94569c3e1566b932065a75e940a28"} Sep 30 20:00:45 crc kubenswrapper[4603]: I0930 20:00:45.558802 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"2ff5cdd2e4a9fa4d422044cb39d78d0f317d00ad6dfdca3f5317a652e12dbc1f"} Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.569208 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"d94e3a86d817dc2d74315f7e0a56d443fbd72955445e2d1266a65824ea4d2ab2"} Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.569259 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"5134e17aa8ec19446db74eac518337c07c337d639edad02c81d7dcc8707af267"} Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.569273 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"55e799619e73f08cac8298a0f1209b653308722d3a0353bd95064b1ed5edad11"} Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.569285 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" 
event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"87384e444c68b62d69a13933eb2d0cb9ca9fbc05a59fc53a911220a713b0f301"} Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.578348 4603 generic.go:334] "Generic (PLEG): container finished" podID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerID="4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b" exitCode=0 Sep 30 20:00:46 crc kubenswrapper[4603]: I0930 20:00:46.578397 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerDied","Data":"4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b"} Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.220722 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.224520 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.246084 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.300906 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.300965 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.301054 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs47v\" (UniqueName: \"kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.401502 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.401552 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.401617 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs47v\" (UniqueName: 
\"kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.402044 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.402257 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.429928 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs47v\" (UniqueName: \"kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v\") pod \"certified-operators-c6kg6\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.550447 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.587418 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s59z" event={"ID":"6d18b2f5-97ae-442e-af33-cc7f501a33fe","Type":"ContainerStarted","Data":"54dac029ff0f9d1671d08d3ae327ba5fa32603ade92afe13a94bdca5957d2f5b"} Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.587593 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:47 crc kubenswrapper[4603]: I0930 20:00:47.630854 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4s59z" podStartSLOduration=8.746848076 podStartE2EDuration="19.630839798s" podCreationTimestamp="2025-09-30 20:00:28 +0000 UTC" firstStartedPulling="2025-09-30 20:00:29.892704053 +0000 UTC m=+831.831162881" lastFinishedPulling="2025-09-30 20:00:40.776695785 +0000 UTC m=+842.715154603" observedRunningTime="2025-09-30 20:00:47.628130453 +0000 UTC m=+849.566589271" watchObservedRunningTime="2025-09-30 20:00:47.630839798 +0000 UTC m=+849.569298616" Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.197658 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:00:48 crc kubenswrapper[4603]: W0930 20:00:48.214495 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57c95571_e582_4ffe_9015_7ba5798c5e02.slice/crio-18240bcba73996ef89cc8222b093cccb7a1554fbc145b886d1cd8f8d0eacd1b1 WatchSource:0}: Error finding container 18240bcba73996ef89cc8222b093cccb7a1554fbc145b886d1cd8f8d0eacd1b1: Status 404 returned error can't find the container with id 18240bcba73996ef89cc8222b093cccb7a1554fbc145b886d1cd8f8d0eacd1b1 Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.595017 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" 
event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerStarted","Data":"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737"} Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.597613 4603 generic.go:334] "Generic (PLEG): container finished" podID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerID="11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d" exitCode=0 Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.597650 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerDied","Data":"11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d"} Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.597688 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerStarted","Data":"18240bcba73996ef89cc8222b093cccb7a1554fbc145b886d1cd8f8d0eacd1b1"} Sep 30 20:00:48 crc kubenswrapper[4603]: I0930 20:00:48.624019 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xrzr7" podStartSLOduration=4.46248014 podStartE2EDuration="8.624002929s" podCreationTimestamp="2025-09-30 20:00:40 +0000 UTC" firstStartedPulling="2025-09-30 20:00:43.528543915 +0000 UTC m=+845.467002753" lastFinishedPulling="2025-09-30 20:00:47.690066734 +0000 UTC m=+849.628525542" observedRunningTime="2025-09-30 20:00:48.619444042 +0000 UTC m=+850.557902860" watchObservedRunningTime="2025-09-30 20:00:48.624002929 +0000 UTC m=+850.562461747" Sep 30 20:00:49 crc kubenswrapper[4603]: I0930 20:00:49.642140 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:49 crc kubenswrapper[4603]: I0930 20:00:49.692709 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:50 crc kubenswrapper[4603]: I0930 20:00:50.616577 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerStarted","Data":"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923"} Sep 30 20:00:50 crc kubenswrapper[4603]: I0930 20:00:50.926265 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-87tdm" Sep 30 20:00:51 crc kubenswrapper[4603]: I0930 20:00:51.299689 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:51 crc kubenswrapper[4603]: I0930 20:00:51.299964 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:51 crc kubenswrapper[4603]: I0930 20:00:51.373307 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:00:51 crc kubenswrapper[4603]: I0930 20:00:51.628484 4603 generic.go:334] "Generic (PLEG): container finished" podID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerID="f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923" exitCode=0 Sep 30 20:00:51 crc kubenswrapper[4603]: I0930 20:00:51.629224 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" 
event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerDied","Data":"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923"} Sep 30 20:00:52 crc kubenswrapper[4603]: I0930 20:00:52.639609 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerStarted","Data":"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a"} Sep 30 20:00:52 crc kubenswrapper[4603]: I0930 20:00:52.659734 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c6kg6" podStartSLOduration=2.190550442 podStartE2EDuration="5.659713271s" podCreationTimestamp="2025-09-30 20:00:47 +0000 UTC" firstStartedPulling="2025-09-30 20:00:48.599070762 +0000 UTC m=+850.537529580" lastFinishedPulling="2025-09-30 20:00:52.068233561 +0000 UTC m=+854.006692409" observedRunningTime="2025-09-30 20:00:52.657592632 +0000 UTC m=+854.596051460" watchObservedRunningTime="2025-09-30 20:00:52.659713271 +0000 UTC m=+854.598172089" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.229354 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.231611 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.233900 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.234029 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.234326 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-ct6zj" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.238828 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.298807 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kksn6\" (UniqueName: \"kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6\") pod \"openstack-operator-index-sr4m4\" (UID: \"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2\") " pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.399784 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kksn6\" (UniqueName: \"kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6\") pod \"openstack-operator-index-sr4m4\" (UID: \"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2\") " pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.427988 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kksn6\" (UniqueName: \"kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6\") pod \"openstack-operator-index-sr4m4\" (UID: \"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2\") " pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.550985 4603 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.551100 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.563015 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.604268 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.737924 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:00:57 crc kubenswrapper[4603]: I0930 20:00:57.990803 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:00:58 crc kubenswrapper[4603]: W0930 20:00:58.001156 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podede12c38_ce5e_4cee_ba9d_99fdeb18d1a2.slice/crio-2850ae991f09c914b5e2910babde3787296c1aae8949b4a97c14bba61b07ef42 WatchSource:0}: Error finding container 2850ae991f09c914b5e2910babde3787296c1aae8949b4a97c14bba61b07ef42: Status 404 returned error can't find the container with id 2850ae991f09c914b5e2910babde3787296c1aae8949b4a97c14bba61b07ef42 Sep 30 20:00:58 crc kubenswrapper[4603]: I0930 20:00:58.610439 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:00:58 crc kubenswrapper[4603]: I0930 20:00:58.694448 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sr4m4" event={"ID":"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2","Type":"ContainerStarted","Data":"2850ae991f09c914b5e2910babde3787296c1aae8949b4a97c14bba61b07ef42"} Sep 30 20:00:59 crc kubenswrapper[4603]: I0930 20:00:59.636932 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-hv78l" Sep 30 20:00:59 crc kubenswrapper[4603]: I0930 20:00:59.644914 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4s59z" Sep 30 20:00:59 crc kubenswrapper[4603]: I0930 20:00:59.701989 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c6kg6" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="registry-server" containerID="cri-o://508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a" gracePeriod=2 Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.459729 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.545386 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities\") pod \"57c95571-e582-4ffe-9015-7ba5798c5e02\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.545524 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content\") pod \"57c95571-e582-4ffe-9015-7ba5798c5e02\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.545612 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fs47v\" (UniqueName: \"kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v\") pod \"57c95571-e582-4ffe-9015-7ba5798c5e02\" (UID: \"57c95571-e582-4ffe-9015-7ba5798c5e02\") " Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.547495 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities" (OuterVolumeSpecName: "utilities") pod "57c95571-e582-4ffe-9015-7ba5798c5e02" (UID: "57c95571-e582-4ffe-9015-7ba5798c5e02"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.553603 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v" (OuterVolumeSpecName: "kube-api-access-fs47v") pod "57c95571-e582-4ffe-9015-7ba5798c5e02" (UID: "57c95571-e582-4ffe-9015-7ba5798c5e02"). InnerVolumeSpecName "kube-api-access-fs47v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.647928 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.647986 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fs47v\" (UniqueName: \"kubernetes.io/projected/57c95571-e582-4ffe-9015-7ba5798c5e02-kube-api-access-fs47v\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.712381 4603 generic.go:334] "Generic (PLEG): container finished" podID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerID="508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a" exitCode=0 Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.712430 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerDied","Data":"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a"} Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.712452 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6kg6" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.712474 4603 scope.go:117] "RemoveContainer" containerID="508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a" Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.712459 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6kg6" event={"ID":"57c95571-e582-4ffe-9015-7ba5798c5e02","Type":"ContainerDied","Data":"18240bcba73996ef89cc8222b093cccb7a1554fbc145b886d1cd8f8d0eacd1b1"} Sep 30 20:01:00 crc kubenswrapper[4603]: I0930 20:01:00.963244 4603 scope.go:117] "RemoveContainer" containerID="f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923" Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.354560 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.447627 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57c95571-e582-4ffe-9015-7ba5798c5e02" (UID: "57c95571-e582-4ffe-9015-7ba5798c5e02"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.456945 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c95571-e582-4ffe-9015-7ba5798c5e02-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.666693 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.672743 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c6kg6"] Sep 30 20:01:01 crc kubenswrapper[4603]: I0930 20:01:01.698644 4603 scope.go:117] "RemoveContainer" containerID="11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d" Sep 30 20:01:02 crc kubenswrapper[4603]: I0930 20:01:02.415556 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:01:02 crc kubenswrapper[4603]: I0930 20:01:02.773112 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" path="/var/lib/kubelet/pods/57c95571-e582-4ffe-9015-7ba5798c5e02/volumes" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.015607 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-zgzz6"] Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.015893 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="extract-content" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.015909 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="extract-content" Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.015928 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="extract-utilities" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.015935 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" 
containerName="extract-utilities" Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.016022 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="registry-server" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.016034 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="registry-server" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.016178 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c95571-e582-4ffe-9015-7ba5798c5e02" containerName="registry-server" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.016704 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.024430 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zgzz6"] Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.085076 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mstf6\" (UniqueName: \"kubernetes.io/projected/62cd7c09-702d-4432-a6ef-89900b8d4705-kube-api-access-mstf6\") pod \"openstack-operator-index-zgzz6\" (UID: \"62cd7c09-702d-4432-a6ef-89900b8d4705\") " pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.132140 4603 scope.go:117] "RemoveContainer" containerID="508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a" Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.132604 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a\": container with ID starting with 508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a not found: ID does not exist" containerID="508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.132638 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a"} err="failed to get container status \"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a\": rpc error: code = NotFound desc = could not find container \"508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a\": container with ID starting with 508617a468100bb341802ad7636569aab966e280ac645d6345cae01c4a89d19a not found: ID does not exist" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.132659 4603 scope.go:117] "RemoveContainer" containerID="f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923" Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.132931 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923\": container with ID starting with f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923 not found: ID does not exist" containerID="f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.132956 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923"} 
err="failed to get container status \"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923\": rpc error: code = NotFound desc = could not find container \"f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923\": container with ID starting with f2ce45474698afb4d3fdc4c2905fbabf6fd27dd0d6e02368b190cdf91a59b923 not found: ID does not exist" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.132984 4603 scope.go:117] "RemoveContainer" containerID="11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d" Sep 30 20:01:03 crc kubenswrapper[4603]: E0930 20:01:03.133288 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d\": container with ID starting with 11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d not found: ID does not exist" containerID="11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.133319 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d"} err="failed to get container status \"11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d\": rpc error: code = NotFound desc = could not find container \"11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d\": container with ID starting with 11314c2c8c55349242582277d386f77538db186108b5bf60b5bddada29bf516d not found: ID does not exist" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.186601 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mstf6\" (UniqueName: \"kubernetes.io/projected/62cd7c09-702d-4432-a6ef-89900b8d4705-kube-api-access-mstf6\") pod \"openstack-operator-index-zgzz6\" (UID: \"62cd7c09-702d-4432-a6ef-89900b8d4705\") " pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.204871 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mstf6\" (UniqueName: \"kubernetes.io/projected/62cd7c09-702d-4432-a6ef-89900b8d4705-kube-api-access-mstf6\") pod \"openstack-operator-index-zgzz6\" (UID: \"62cd7c09-702d-4432-a6ef-89900b8d4705\") " pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:03 crc kubenswrapper[4603]: I0930 20:01:03.346795 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.263208 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-zgzz6"] Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.746923 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zgzz6" event={"ID":"62cd7c09-702d-4432-a6ef-89900b8d4705","Type":"ContainerStarted","Data":"9fd7f7dc96803a0329ef78404279e3fe51ad38ad5eebb77dabc293bc2908075a"} Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.747311 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-zgzz6" event={"ID":"62cd7c09-702d-4432-a6ef-89900b8d4705","Type":"ContainerStarted","Data":"b0810ccb595820e51dbd99011631b5ece1fcf744f927138b8ddd2198cfa4474e"} Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.748995 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sr4m4" event={"ID":"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2","Type":"ContainerStarted","Data":"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee"} Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.749110 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-sr4m4" podUID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" containerName="registry-server" containerID="cri-o://6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee" gracePeriod=2 Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.789492 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-sr4m4" podStartSLOduration=1.8528397380000001 podStartE2EDuration="7.789469996s" podCreationTimestamp="2025-09-30 20:00:57 +0000 UTC" firstStartedPulling="2025-09-30 20:00:58.003849598 +0000 UTC m=+859.942308426" lastFinishedPulling="2025-09-30 20:01:03.940479866 +0000 UTC m=+865.878938684" observedRunningTime="2025-09-30 20:01:04.786860413 +0000 UTC m=+866.725319241" watchObservedRunningTime="2025-09-30 20:01:04.789469996 +0000 UTC m=+866.727928814" Sep 30 20:01:04 crc kubenswrapper[4603]: I0930 20:01:04.790916 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-zgzz6" podStartSLOduration=1.74277248 podStartE2EDuration="1.790909266s" podCreationTimestamp="2025-09-30 20:01:03 +0000 UTC" firstStartedPulling="2025-09-30 20:01:04.283492527 +0000 UTC m=+866.221951365" lastFinishedPulling="2025-09-30 20:01:04.331629323 +0000 UTC m=+866.270088151" observedRunningTime="2025-09-30 20:01:04.762712808 +0000 UTC m=+866.701171626" watchObservedRunningTime="2025-09-30 20:01:04.790909266 +0000 UTC m=+866.729368084" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.125183 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.216213 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kksn6\" (UniqueName: \"kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6\") pod \"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2\" (UID: \"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2\") " Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.223865 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6" (OuterVolumeSpecName: "kube-api-access-kksn6") pod "ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" (UID: "ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2"). InnerVolumeSpecName "kube-api-access-kksn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.317693 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kksn6\" (UniqueName: \"kubernetes.io/projected/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2-kube-api-access-kksn6\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.758838 4603 generic.go:334] "Generic (PLEG): container finished" podID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" containerID="6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee" exitCode=0 Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.759049 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sr4m4" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.759114 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sr4m4" event={"ID":"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2","Type":"ContainerDied","Data":"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee"} Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.759188 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sr4m4" event={"ID":"ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2","Type":"ContainerDied","Data":"2850ae991f09c914b5e2910babde3787296c1aae8949b4a97c14bba61b07ef42"} Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.759220 4603 scope.go:117] "RemoveContainer" containerID="6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.802937 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.810906 4603 scope.go:117] "RemoveContainer" containerID="6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee" Sep 30 20:01:05 crc kubenswrapper[4603]: E0930 20:01:05.812761 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee\": container with ID starting with 6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee not found: ID does not exist" containerID="6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.812998 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee"} err="failed to get 
container status \"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee\": rpc error: code = NotFound desc = could not find container \"6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee\": container with ID starting with 6414cdecf211a331ea4111a6b3ee6b92769530defa96e257595225f00c23d1ee not found: ID does not exist" Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.813517 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-sr4m4"] Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.816783 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:01:05 crc kubenswrapper[4603]: I0930 20:01:05.817346 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xrzr7" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="registry-server" containerID="cri-o://7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737" gracePeriod=2 Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.271731 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.329057 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96ql5\" (UniqueName: \"kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5\") pod \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.329147 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities\") pod \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.329310 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content\") pod \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\" (UID: \"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419\") " Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.330532 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities" (OuterVolumeSpecName: "utilities") pod "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" (UID: "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.334637 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.336906 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5" (OuterVolumeSpecName: "kube-api-access-96ql5") pod "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" (UID: "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419"). InnerVolumeSpecName "kube-api-access-96ql5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.352935 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" (UID: "e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.435282 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96ql5\" (UniqueName: \"kubernetes.io/projected/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-kube-api-access-96ql5\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.435354 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.771094 4603 generic.go:334] "Generic (PLEG): container finished" podID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerID="7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737" exitCode=0 Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.771525 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrzr7" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.778537 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" path="/var/lib/kubelet/pods/ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2/volumes" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.779362 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerDied","Data":"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737"} Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.779401 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrzr7" event={"ID":"e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419","Type":"ContainerDied","Data":"5326f9795c93cd6926a229cfd91a639bd47a1a72b6fe3b77e5a7e7a1fe654f2e"} Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.779468 4603 scope.go:117] "RemoveContainer" containerID="7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.821308 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.822972 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrzr7"] Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.826955 4603 scope.go:117] "RemoveContainer" containerID="4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.865562 4603 scope.go:117] "RemoveContainer" containerID="a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.884835 4603 scope.go:117] "RemoveContainer" containerID="7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737" Sep 30 20:01:06 crc kubenswrapper[4603]: E0930 20:01:06.885481 4603 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737\": container with ID starting with 7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737 not found: ID does not exist" containerID="7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.885559 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737"} err="failed to get container status \"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737\": rpc error: code = NotFound desc = could not find container \"7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737\": container with ID starting with 7ae0fa107ec9b595bb39b5fe909a54f082053a550612419b6eddcda184819737 not found: ID does not exist" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.885613 4603 scope.go:117] "RemoveContainer" containerID="4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b" Sep 30 20:01:06 crc kubenswrapper[4603]: E0930 20:01:06.886383 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b\": container with ID starting with 4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b not found: ID does not exist" containerID="4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.886433 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b"} err="failed to get container status \"4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b\": rpc error: code = NotFound desc = could not find container \"4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b\": container with ID starting with 4ec322063fa7740a7f81b05c6f62f81bede31e41c8e8586493b89daab6b4024b not found: ID does not exist" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.886461 4603 scope.go:117] "RemoveContainer" containerID="a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264" Sep 30 20:01:06 crc kubenswrapper[4603]: E0930 20:01:06.888828 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264\": container with ID starting with a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264 not found: ID does not exist" containerID="a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264" Sep 30 20:01:06 crc kubenswrapper[4603]: I0930 20:01:06.888868 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264"} err="failed to get container status \"a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264\": rpc error: code = NotFound desc = could not find container \"a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264\": container with ID starting with a3a3dab8617f1e4cef15afaee9794563cc8108c551429325aa1eb80ba28bd264 not found: ID does not exist" Sep 30 20:01:08 crc kubenswrapper[4603]: I0930 20:01:08.442508 4603 patch_prober.go:28] interesting 
pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:01:08 crc kubenswrapper[4603]: I0930 20:01:08.442605 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:01:08 crc kubenswrapper[4603]: I0930 20:01:08.776697 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" path="/var/lib/kubelet/pods/e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419/volumes" Sep 30 20:01:13 crc kubenswrapper[4603]: I0930 20:01:13.347279 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:13 crc kubenswrapper[4603]: I0930 20:01:13.347354 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:13 crc kubenswrapper[4603]: I0930 20:01:13.382869 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:13 crc kubenswrapper[4603]: I0930 20:01:13.856802 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-zgzz6" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.259581 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4"] Sep 30 20:01:18 crc kubenswrapper[4603]: E0930 20:01:18.260322 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="extract-content" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260334 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="extract-content" Sep 30 20:01:18 crc kubenswrapper[4603]: E0930 20:01:18.260343 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260349 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: E0930 20:01:18.260363 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260369 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: E0930 20:01:18.260388 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="extract-utilities" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260394 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="extract-utilities" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260529 4603 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ede12c38-ce5e-4cee-ba9d-99fdeb18d1a2" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.260538 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ee1ab7-3bd1-40da-9be1-0f61ec0d0419" containerName="registry-server" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.261500 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.270285 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4"] Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.271052 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-lmp7r" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.314576 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.314739 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.314771 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xxh7\" (UniqueName: \"kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.415703 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.415797 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xxh7\" (UniqueName: \"kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.415869 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.416572 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.416660 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.442924 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xxh7\" (UniqueName: \"kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7\") pod \"050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:18 crc kubenswrapper[4603]: I0930 20:01:18.589000 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:19 crc kubenswrapper[4603]: I0930 20:01:19.170888 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4"] Sep 30 20:01:19 crc kubenswrapper[4603]: W0930 20:01:19.182757 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6941dd13_432d_4bb3_a789_54d719d95d42.slice/crio-c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a WatchSource:0}: Error finding container c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a: Status 404 returned error can't find the container with id c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a Sep 30 20:01:19 crc kubenswrapper[4603]: I0930 20:01:19.866809 4603 generic.go:334] "Generic (PLEG): container finished" podID="6941dd13-432d-4bb3-a789-54d719d95d42" containerID="870ccba1b20fa0ba88f5f5dd6656b456a89700525b94c93eb69695854ee19614" exitCode=0 Sep 30 20:01:19 crc kubenswrapper[4603]: I0930 20:01:19.867026 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" event={"ID":"6941dd13-432d-4bb3-a789-54d719d95d42","Type":"ContainerDied","Data":"870ccba1b20fa0ba88f5f5dd6656b456a89700525b94c93eb69695854ee19614"} Sep 30 20:01:19 crc kubenswrapper[4603]: I0930 20:01:19.867113 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" 
event={"ID":"6941dd13-432d-4bb3-a789-54d719d95d42","Type":"ContainerStarted","Data":"c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a"} Sep 30 20:01:20 crc kubenswrapper[4603]: I0930 20:01:20.874013 4603 generic.go:334] "Generic (PLEG): container finished" podID="6941dd13-432d-4bb3-a789-54d719d95d42" containerID="662ad56ca0ef365774fe4d66204a8d6523cbd10225f086e1744497f954137652" exitCode=0 Sep 30 20:01:20 crc kubenswrapper[4603]: I0930 20:01:20.874240 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" event={"ID":"6941dd13-432d-4bb3-a789-54d719d95d42","Type":"ContainerDied","Data":"662ad56ca0ef365774fe4d66204a8d6523cbd10225f086e1744497f954137652"} Sep 30 20:01:21 crc kubenswrapper[4603]: I0930 20:01:21.887466 4603 generic.go:334] "Generic (PLEG): container finished" podID="6941dd13-432d-4bb3-a789-54d719d95d42" containerID="8601bf3bd85279d85e2337aeb85379773fe6d581d1f7e899020dbdec48eba1a6" exitCode=0 Sep 30 20:01:21 crc kubenswrapper[4603]: I0930 20:01:21.887537 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" event={"ID":"6941dd13-432d-4bb3-a789-54d719d95d42","Type":"ContainerDied","Data":"8601bf3bd85279d85e2337aeb85379773fe6d581d1f7e899020dbdec48eba1a6"} Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.124849 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.294628 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xxh7\" (UniqueName: \"kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7\") pod \"6941dd13-432d-4bb3-a789-54d719d95d42\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.294749 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle\") pod \"6941dd13-432d-4bb3-a789-54d719d95d42\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.294814 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util\") pod \"6941dd13-432d-4bb3-a789-54d719d95d42\" (UID: \"6941dd13-432d-4bb3-a789-54d719d95d42\") " Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.298468 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle" (OuterVolumeSpecName: "bundle") pod "6941dd13-432d-4bb3-a789-54d719d95d42" (UID: "6941dd13-432d-4bb3-a789-54d719d95d42"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.305236 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7" (OuterVolumeSpecName: "kube-api-access-9xxh7") pod "6941dd13-432d-4bb3-a789-54d719d95d42" (UID: "6941dd13-432d-4bb3-a789-54d719d95d42"). InnerVolumeSpecName "kube-api-access-9xxh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.321475 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util" (OuterVolumeSpecName: "util") pod "6941dd13-432d-4bb3-a789-54d719d95d42" (UID: "6941dd13-432d-4bb3-a789-54d719d95d42"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.396965 4603 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-util\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.397014 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xxh7\" (UniqueName: \"kubernetes.io/projected/6941dd13-432d-4bb3-a789-54d719d95d42-kube-api-access-9xxh7\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.397034 4603 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6941dd13-432d-4bb3-a789-54d719d95d42-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.916451 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" event={"ID":"6941dd13-432d-4bb3-a789-54d719d95d42","Type":"ContainerDied","Data":"c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a"} Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.916500 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7fd0adc7280b125b34f97eceeb59299baf8e923ad2c1727211031b73b04f36a" Sep 30 20:01:23 crc kubenswrapper[4603]: I0930 20:01:23.916582 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.298124 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl"] Sep 30 20:01:26 crc kubenswrapper[4603]: E0930 20:01:26.298377 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="util" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.298388 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="util" Sep 30 20:01:26 crc kubenswrapper[4603]: E0930 20:01:26.298402 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="pull" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.298408 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="pull" Sep 30 20:01:26 crc kubenswrapper[4603]: E0930 20:01:26.298428 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="extract" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.298435 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="extract" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.298536 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="6941dd13-432d-4bb3-a789-54d719d95d42" containerName="extract" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.299079 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.310723 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-4jcnx" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.346462 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl"] Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.349589 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh5g9\" (UniqueName: \"kubernetes.io/projected/e52d3ccd-fc80-4261-9043-2def9da416b6-kube-api-access-gh5g9\") pod \"openstack-operator-controller-operator-7dc7f48c86-wfrdl\" (UID: \"e52d3ccd-fc80-4261-9043-2def9da416b6\") " pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.450052 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh5g9\" (UniqueName: \"kubernetes.io/projected/e52d3ccd-fc80-4261-9043-2def9da416b6-kube-api-access-gh5g9\") pod \"openstack-operator-controller-operator-7dc7f48c86-wfrdl\" (UID: \"e52d3ccd-fc80-4261-9043-2def9da416b6\") " pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.468600 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh5g9\" (UniqueName: \"kubernetes.io/projected/e52d3ccd-fc80-4261-9043-2def9da416b6-kube-api-access-gh5g9\") pod \"openstack-operator-controller-operator-7dc7f48c86-wfrdl\" 
(UID: \"e52d3ccd-fc80-4261-9043-2def9da416b6\") " pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:26 crc kubenswrapper[4603]: I0930 20:01:26.613862 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:27 crc kubenswrapper[4603]: I0930 20:01:27.182484 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl"] Sep 30 20:01:27 crc kubenswrapper[4603]: I0930 20:01:27.954540 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" event={"ID":"e52d3ccd-fc80-4261-9043-2def9da416b6","Type":"ContainerStarted","Data":"dab5d64a827ee0af9425dd5d0feef82b3adcf2a3e1b750e764ad1d764a12622f"} Sep 30 20:01:31 crc kubenswrapper[4603]: I0930 20:01:31.977114 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" event={"ID":"e52d3ccd-fc80-4261-9043-2def9da416b6","Type":"ContainerStarted","Data":"a1bbf18547e270eabd0e633643ceb8d8be88ad8ea96cf8b3582b1f2fb5735af3"} Sep 30 20:01:37 crc kubenswrapper[4603]: I0930 20:01:37.018155 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" event={"ID":"e52d3ccd-fc80-4261-9043-2def9da416b6","Type":"ContainerStarted","Data":"b4c8b293696d040744e04d1697d290fc5ca65227e9753320669dd49a1ee4a293"} Sep 30 20:01:37 crc kubenswrapper[4603]: I0930 20:01:37.019071 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:37 crc kubenswrapper[4603]: I0930 20:01:37.021789 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" Sep 30 20:01:37 crc kubenswrapper[4603]: I0930 20:01:37.065077 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7dc7f48c86-wfrdl" podStartSLOduration=2.006960942 podStartE2EDuration="11.065052826s" podCreationTimestamp="2025-09-30 20:01:26 +0000 UTC" firstStartedPulling="2025-09-30 20:01:27.169115193 +0000 UTC m=+889.107574001" lastFinishedPulling="2025-09-30 20:01:36.227207057 +0000 UTC m=+898.165665885" observedRunningTime="2025-09-30 20:01:37.056681722 +0000 UTC m=+898.995140570" watchObservedRunningTime="2025-09-30 20:01:37.065052826 +0000 UTC m=+899.003511654" Sep 30 20:01:38 crc kubenswrapper[4603]: I0930 20:01:38.441856 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:01:38 crc kubenswrapper[4603]: I0930 20:01:38.441940 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:02:08 crc kubenswrapper[4603]: I0930 20:02:08.441980 4603 patch_prober.go:28] interesting 
pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:02:08 crc kubenswrapper[4603]: I0930 20:02:08.442731 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:02:08 crc kubenswrapper[4603]: I0930 20:02:08.442793 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:02:08 crc kubenswrapper[4603]: I0930 20:02:08.443638 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:02:08 crc kubenswrapper[4603]: I0930 20:02:08.443733 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51" gracePeriod=600 Sep 30 20:02:09 crc kubenswrapper[4603]: I0930 20:02:09.257192 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51" exitCode=0 Sep 30 20:02:09 crc kubenswrapper[4603]: I0930 20:02:09.257279 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51"} Sep 30 20:02:09 crc kubenswrapper[4603]: I0930 20:02:09.257526 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7"} Sep 30 20:02:09 crc kubenswrapper[4603]: I0930 20:02:09.257549 4603 scope.go:117] "RemoveContainer" containerID="05cf39c3e6164e950de0c0a2ea34fdbafffd40b648666f0546fe20dc69272087" Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.907997 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq"] Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.909786 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.913915 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zk9df" Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.927810 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq"] Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.931133 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn"] Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.932064 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.939200 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn"] Sep 30 20:02:12 crc kubenswrapper[4603]: I0930 20:02:12.943869 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-5jw5h" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:12.997867 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:12.998838 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:12.999105 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.000302 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.005379 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-pknwg" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.005612 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-n2s7t" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.010625 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.024586 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.032205 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.033065 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.033337 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt2mf\" (UniqueName: \"kubernetes.io/projected/e17e463e-0a04-457a-a014-480772f91871-kube-api-access-nt2mf\") pod \"barbican-operator-controller-manager-6ff8b75857-qgzmq\" (UID: \"e17e463e-0a04-457a-a014-480772f91871\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.033409 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6882\" (UniqueName: \"kubernetes.io/projected/b9de699a-42fd-40f8-94e3-ccddd9f2e6c2-kube-api-access-b6882\") pod \"cinder-operator-controller-manager-644bddb6d8-62mwn\" (UID: \"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.035801 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-r8v2r" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.050516 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.089224 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.090487 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.094898 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.100417 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-fgrxs" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.108517 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.108632 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.112908 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.115558 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-fslsh" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.115727 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.132098 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.141809 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6882\" (UniqueName: \"kubernetes.io/projected/b9de699a-42fd-40f8-94e3-ccddd9f2e6c2-kube-api-access-b6882\") pod \"cinder-operator-controller-manager-644bddb6d8-62mwn\" (UID: \"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.141866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7ckz\" (UniqueName: \"kubernetes.io/projected/3e6da4be-f92f-48ee-85e4-f316da7f6e27-kube-api-access-z7ckz\") pod \"glance-operator-controller-manager-84958c4d49-p6fms\" (UID: \"3e6da4be-f92f-48ee-85e4-f316da7f6e27\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.141905 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt2mf\" (UniqueName: \"kubernetes.io/projected/e17e463e-0a04-457a-a014-480772f91871-kube-api-access-nt2mf\") pod \"barbican-operator-controller-manager-6ff8b75857-qgzmq\" (UID: \"e17e463e-0a04-457a-a014-480772f91871\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.141952 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bndb5\" (UniqueName: \"kubernetes.io/projected/63389a19-bdd5-4862-a0b0-f93a5df19823-kube-api-access-bndb5\") pod \"designate-operator-controller-manager-84f4f7b77b-kn7c7\" (UID: \"63389a19-bdd5-4862-a0b0-f93a5df19823\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.142008 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz2ps\" (UniqueName: \"kubernetes.io/projected/9caa0cb4-2c14-430d-ac4a-942c78ec844e-kube-api-access-tz2ps\") pod \"heat-operator-controller-manager-5d889d78cf-2m98j\" (UID: \"9caa0cb4-2c14-430d-ac4a-942c78ec844e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.159493 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.172709 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-ffxpw" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.211104 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt2mf\" (UniqueName: \"kubernetes.io/projected/e17e463e-0a04-457a-a014-480772f91871-kube-api-access-nt2mf\") pod \"barbican-operator-controller-manager-6ff8b75857-qgzmq\" (UID: \"e17e463e-0a04-457a-a014-480772f91871\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.213483 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.217150 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.226246 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6882\" (UniqueName: \"kubernetes.io/projected/b9de699a-42fd-40f8-94e3-ccddd9f2e6c2-kube-api-access-b6882\") pod \"cinder-operator-controller-manager-644bddb6d8-62mwn\" (UID: \"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.226417 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-fk7bv" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.226907 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.246996 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248195 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bndb5\" (UniqueName: \"kubernetes.io/projected/63389a19-bdd5-4862-a0b0-f93a5df19823-kube-api-access-bndb5\") pod \"designate-operator-controller-manager-84f4f7b77b-kn7c7\" (UID: \"63389a19-bdd5-4862-a0b0-f93a5df19823\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248241 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnhtq\" (UniqueName: \"kubernetes.io/projected/3fb36813-9cc2-4668-ad3a-da10b9594f8a-kube-api-access-wnhtq\") pod \"horizon-operator-controller-manager-9f4696d94-s29xr\" (UID: \"3fb36813-9cc2-4668-ad3a-da10b9594f8a\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248309 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz2ps\" (UniqueName: \"kubernetes.io/projected/9caa0cb4-2c14-430d-ac4a-942c78ec844e-kube-api-access-tz2ps\") pod \"heat-operator-controller-manager-5d889d78cf-2m98j\" (UID: \"9caa0cb4-2c14-430d-ac4a-942c78ec844e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248348 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248388 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s26fq\" (UniqueName: \"kubernetes.io/projected/e0158f35-7f0f-4c77-b761-6b624fc675f0-kube-api-access-s26fq\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248419 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7ckz\" (UniqueName: \"kubernetes.io/projected/3e6da4be-f92f-48ee-85e4-f316da7f6e27-kube-api-access-z7ckz\") pod \"glance-operator-controller-manager-84958c4d49-p6fms\" (UID: \"3e6da4be-f92f-48ee-85e4-f316da7f6e27\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.248450 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sblwg\" (UniqueName: \"kubernetes.io/projected/5b507d22-1613-4e76-948f-e4d55f160473-kube-api-access-sblwg\") pod \"ironic-operator-controller-manager-7975b88857-n2qf4\" (UID: \"5b507d22-1613-4e76-948f-e4d55f160473\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.261019 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5"] Sep 30 
20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.305141 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7ckz\" (UniqueName: \"kubernetes.io/projected/3e6da4be-f92f-48ee-85e4-f316da7f6e27-kube-api-access-z7ckz\") pod \"glance-operator-controller-manager-84958c4d49-p6fms\" (UID: \"3e6da4be-f92f-48ee-85e4-f316da7f6e27\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.310428 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz2ps\" (UniqueName: \"kubernetes.io/projected/9caa0cb4-2c14-430d-ac4a-942c78ec844e-kube-api-access-tz2ps\") pod \"heat-operator-controller-manager-5d889d78cf-2m98j\" (UID: \"9caa0cb4-2c14-430d-ac4a-942c78ec844e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.314664 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bndb5\" (UniqueName: \"kubernetes.io/projected/63389a19-bdd5-4862-a0b0-f93a5df19823-kube-api-access-bndb5\") pod \"designate-operator-controller-manager-84f4f7b77b-kn7c7\" (UID: \"63389a19-bdd5-4862-a0b0-f93a5df19823\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.314722 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.315705 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.316502 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.322290 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-pzgp7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.327525 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.351957 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnhtq\" (UniqueName: \"kubernetes.io/projected/3fb36813-9cc2-4668-ad3a-da10b9594f8a-kube-api-access-wnhtq\") pod \"horizon-operator-controller-manager-9f4696d94-s29xr\" (UID: \"3fb36813-9cc2-4668-ad3a-da10b9594f8a\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.353561 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.353708 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s26fq\" (UniqueName: \"kubernetes.io/projected/e0158f35-7f0f-4c77-b761-6b624fc675f0-kube-api-access-s26fq\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.355012 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sblwg\" (UniqueName: \"kubernetes.io/projected/5b507d22-1613-4e76-948f-e4d55f160473-kube-api-access-sblwg\") pod \"ironic-operator-controller-manager-7975b88857-n2qf4\" (UID: \"5b507d22-1613-4e76-948f-e4d55f160473\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.355190 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fwfv\" (UniqueName: \"kubernetes.io/projected/ac8d681e-168c-401e-9529-54098a214435-kube-api-access-9fwfv\") pod \"keystone-operator-controller-manager-5bd55b4bff-bxvz5\" (UID: \"ac8d681e-168c-401e-9529-54098a214435\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.355691 4603 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.355851 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert podName:e0158f35-7f0f-4c77-b761-6b624fc675f0 nodeName:}" failed. No retries permitted until 2025-09-30 20:02:13.85580022 +0000 UTC m=+935.794259038 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert") pod "infra-operator-controller-manager-9d6c5db85-pszrb" (UID: "e0158f35-7f0f-4c77-b761-6b624fc675f0") : secret "infra-operator-webhook-server-cert" not found Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.357021 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.369175 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-fd845"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.370296 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.374756 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-gjj27" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.402761 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.407052 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s26fq\" (UniqueName: \"kubernetes.io/projected/e0158f35-7f0f-4c77-b761-6b624fc675f0-kube-api-access-s26fq\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.415609 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sblwg\" (UniqueName: \"kubernetes.io/projected/5b507d22-1613-4e76-948f-e4d55f160473-kube-api-access-sblwg\") pod \"ironic-operator-controller-manager-7975b88857-n2qf4\" (UID: \"5b507d22-1613-4e76-948f-e4d55f160473\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.417238 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnhtq\" (UniqueName: \"kubernetes.io/projected/3fb36813-9cc2-4668-ad3a-da10b9594f8a-kube-api-access-wnhtq\") pod \"horizon-operator-controller-manager-9f4696d94-s29xr\" (UID: \"3fb36813-9cc2-4668-ad3a-da10b9594f8a\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.424202 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.443424 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-fd845"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.458837 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqvnw\" (UniqueName: \"kubernetes.io/projected/17b3f01d-c7ac-4b96-a90b-02c645fa27ed-kube-api-access-lqvnw\") pod \"mariadb-operator-controller-manager-88c7-fd845\" (UID: \"17b3f01d-c7ac-4b96-a90b-02c645fa27ed\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.458881 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngdzq\" (UniqueName: \"kubernetes.io/projected/961055da-fa39-4301-b30d-f0a61d41371a-kube-api-access-ngdzq\") pod \"manila-operator-controller-manager-6d68dbc695-mg4sm\" (UID: \"961055da-fa39-4301-b30d-f0a61d41371a\") " 
pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.458949 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fwfv\" (UniqueName: \"kubernetes.io/projected/ac8d681e-168c-401e-9529-54098a214435-kube-api-access-9fwfv\") pod \"keystone-operator-controller-manager-5bd55b4bff-bxvz5\" (UID: \"ac8d681e-168c-401e-9529-54098a214435\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.460314 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.462288 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.481246 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.488505 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.489666 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.490143 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.499998 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-slsfx" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.500940 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.501091 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-kv9k2" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.511719 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fwfv\" (UniqueName: \"kubernetes.io/projected/ac8d681e-168c-401e-9529-54098a214435-kube-api-access-9fwfv\") pod \"keystone-operator-controller-manager-5bd55b4bff-bxvz5\" (UID: \"ac8d681e-168c-401e-9529-54098a214435\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.525235 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.561926 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjq6x\" (UniqueName: \"kubernetes.io/projected/45381319-4688-4802-a937-e804b3d0e6b1-kube-api-access-hjq6x\") pod \"neutron-operator-controller-manager-64d7b59854-6cvcn\" (UID: \"45381319-4688-4802-a937-e804b3d0e6b1\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:02:13 crc 
kubenswrapper[4603]: I0930 20:02:13.561970 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqvnw\" (UniqueName: \"kubernetes.io/projected/17b3f01d-c7ac-4b96-a90b-02c645fa27ed-kube-api-access-lqvnw\") pod \"mariadb-operator-controller-manager-88c7-fd845\" (UID: \"17b3f01d-c7ac-4b96-a90b-02c645fa27ed\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.562001 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngdzq\" (UniqueName: \"kubernetes.io/projected/961055da-fa39-4301-b30d-f0a61d41371a-kube-api-access-ngdzq\") pod \"manila-operator-controller-manager-6d68dbc695-mg4sm\" (UID: \"961055da-fa39-4301-b30d-f0a61d41371a\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.562546 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpm92\" (UniqueName: \"kubernetes.io/projected/3a7017fa-c8d0-493d-a338-ec3d2626a289-kube-api-access-lpm92\") pod \"nova-operator-controller-manager-c7c776c96-6l5w5\" (UID: \"3a7017fa-c8d0-493d-a338-ec3d2626a289\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.587699 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.589250 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.601661 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.601834 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-qklv9" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.603899 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.607150 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.608608 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-2tvx2" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.614553 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngdzq\" (UniqueName: \"kubernetes.io/projected/961055da-fa39-4301-b30d-f0a61d41371a-kube-api-access-ngdzq\") pod \"manila-operator-controller-manager-6d68dbc695-mg4sm\" (UID: \"961055da-fa39-4301-b30d-f0a61d41371a\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.627069 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqvnw\" (UniqueName: \"kubernetes.io/projected/17b3f01d-c7ac-4b96-a90b-02c645fa27ed-kube-api-access-lqvnw\") pod \"mariadb-operator-controller-manager-88c7-fd845\" (UID: \"17b3f01d-c7ac-4b96-a90b-02c645fa27ed\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.645307 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.666863 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.666910 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.667449 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.667807 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.668422 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.668459 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpm92\" (UniqueName: \"kubernetes.io/projected/3a7017fa-c8d0-493d-a338-ec3d2626a289-kube-api-access-lpm92\") pod \"nova-operator-controller-manager-c7c776c96-6l5w5\" (UID: \"3a7017fa-c8d0-493d-a338-ec3d2626a289\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.668491 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jsrh\" (UniqueName: \"kubernetes.io/projected/e6858f69-2a71-4459-89d4-59939c74b778-kube-api-access-2jsrh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.668538 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjq6x\" (UniqueName: \"kubernetes.io/projected/45381319-4688-4802-a937-e804b3d0e6b1-kube-api-access-hjq6x\") pod \"neutron-operator-controller-manager-64d7b59854-6cvcn\" (UID: \"45381319-4688-4802-a937-e804b3d0e6b1\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.676804 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-q5mvh" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.680936 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.682481 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.683365 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.685776 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.702597 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-bphbj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.704703 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.705456 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.705740 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.708936 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xtttg" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.709269 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-w42qh" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.721756 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpm92\" (UniqueName: \"kubernetes.io/projected/3a7017fa-c8d0-493d-a338-ec3d2626a289-kube-api-access-lpm92\") pod \"nova-operator-controller-manager-c7c776c96-6l5w5\" (UID: \"3a7017fa-c8d0-493d-a338-ec3d2626a289\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.722028 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.729089 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjq6x\" (UniqueName: \"kubernetes.io/projected/45381319-4688-4802-a937-e804b3d0e6b1-kube-api-access-hjq6x\") pod \"neutron-operator-controller-manager-64d7b59854-6cvcn\" (UID: \"45381319-4688-4802-a937-e804b3d0e6b1\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.743226 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-h8j45"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.744213 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.752078 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-rhnl7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770130 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv86z\" (UniqueName: \"kubernetes.io/projected/453ca8a5-9f93-4ad9-a0ef-14858d949b08-kube-api-access-vv86z\") pod \"ovn-operator-controller-manager-9976ff44c-lcsw7\" (UID: \"453ca8a5-9f93-4ad9-a0ef-14858d949b08\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770187 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xvnl\" (UniqueName: \"kubernetes.io/projected/ae8ee517-97d6-422e-a058-c229d111e654-kube-api-access-5xvnl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-28kqj\" (UID: \"ae8ee517-97d6-422e-a058-c229d111e654\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770217 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48hng\" (UniqueName: \"kubernetes.io/projected/e0835976-81c4-4f6f-aad4-0af0341168e2-kube-api-access-48hng\") pod \"placement-operator-controller-manager-589c58c6c-dqmkb\" (UID: \"e0835976-81c4-4f6f-aad4-0af0341168e2\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770248 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7pv5\" (UniqueName: \"kubernetes.io/projected/3a536984-9465-496f-9cfb-f48e32bd0c1b-kube-api-access-m7pv5\") pod \"swift-operator-controller-manager-bc7dc7bd9-z7l94\" (UID: \"3a536984-9465-496f-9cfb-f48e32bd0c1b\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770269 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770312 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jsrh\" (UniqueName: \"kubernetes.io/projected/e6858f69-2a71-4459-89d4-59939c74b778-kube-api-access-2jsrh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.770333 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n4fz\" (UniqueName: \"kubernetes.io/projected/e262072a-1f18-48fa-a2af-73466cc9a40b-kube-api-access-9n4fz\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fbmst\" (UID: \"e262072a-1f18-48fa-a2af-73466cc9a40b\") " 
pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.770473 4603 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.770512 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert podName:e6858f69-2a71-4459-89d4-59939c74b778 nodeName:}" failed. No retries permitted until 2025-09-30 20:02:14.270498917 +0000 UTC m=+936.208957735 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-gcb85" (UID: "e6858f69-2a71-4459-89d4-59939c74b778") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.778933 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.780434 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.781501 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.790764 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-q7c6j" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.821348 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-h8j45"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.833678 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.835673 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.838587 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jsrh\" (UniqueName: \"kubernetes.io/projected/e6858f69-2a71-4459-89d4-59939c74b778-kube-api-access-2jsrh\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.865551 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.878693 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj"] Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886227 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp94j\" (UniqueName: \"kubernetes.io/projected/58ddfa7e-b740-4d7d-ba1e-22d3c81a5870-kube-api-access-rp94j\") pod \"watcher-operator-controller-manager-76669f99c-2gvcj\" (UID: \"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886279 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7pv5\" (UniqueName: \"kubernetes.io/projected/3a536984-9465-496f-9cfb-f48e32bd0c1b-kube-api-access-m7pv5\") pod \"swift-operator-controller-manager-bc7dc7bd9-z7l94\" (UID: \"3a536984-9465-496f-9cfb-f48e32bd0c1b\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886360 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n4fz\" (UniqueName: \"kubernetes.io/projected/e262072a-1f18-48fa-a2af-73466cc9a40b-kube-api-access-9n4fz\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fbmst\" (UID: \"e262072a-1f18-48fa-a2af-73466cc9a40b\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886388 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cwhr\" (UniqueName: \"kubernetes.io/projected/c1669984-9655-488e-a243-0a48f9e381c1-kube-api-access-4cwhr\") pod \"test-operator-controller-manager-f66b554c6-h8j45\" (UID: \"c1669984-9655-488e-a243-0a48f9e381c1\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886410 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886432 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv86z\" (UniqueName: \"kubernetes.io/projected/453ca8a5-9f93-4ad9-a0ef-14858d949b08-kube-api-access-vv86z\") pod \"ovn-operator-controller-manager-9976ff44c-lcsw7\" (UID: \"453ca8a5-9f93-4ad9-a0ef-14858d949b08\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.886456 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xvnl\" (UniqueName: \"kubernetes.io/projected/ae8ee517-97d6-422e-a058-c229d111e654-kube-api-access-5xvnl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-28kqj\" (UID: \"ae8ee517-97d6-422e-a058-c229d111e654\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 
Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.886974 4603 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Sep 30 20:02:13 crc kubenswrapper[4603]: E0930 20:02:13.887007 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert podName:e0158f35-7f0f-4c77-b761-6b624fc675f0 nodeName:}" failed. No retries permitted until 2025-09-30 20:02:14.886995525 +0000 UTC m=+936.825454333 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert") pod "infra-operator-controller-manager-9d6c5db85-pszrb" (UID: "e0158f35-7f0f-4c77-b761-6b624fc675f0") : secret "infra-operator-webhook-server-cert" not found
Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.899506 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94"]
Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.964517 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb"]
Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.981476 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv86z\" (UniqueName: \"kubernetes.io/projected/453ca8a5-9f93-4ad9-a0ef-14858d949b08-kube-api-access-vv86z\") pod \"ovn-operator-controller-manager-9976ff44c-lcsw7\" (UID: \"453ca8a5-9f93-4ad9-a0ef-14858d949b08\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7"
Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.987902 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cwhr\" (UniqueName: \"kubernetes.io/projected/c1669984-9655-488e-a243-0a48f9e381c1-kube-api-access-4cwhr\") pod \"test-operator-controller-manager-f66b554c6-h8j45\" (UID: \"c1669984-9655-488e-a243-0a48f9e381c1\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45"
Sep 30 20:02:13 crc kubenswrapper[4603]: I0930 20:02:13.988023 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp94j\" (UniqueName: \"kubernetes.io/projected/58ddfa7e-b740-4d7d-ba1e-22d3c81a5870-kube-api-access-rp94j\") pod \"watcher-operator-controller-manager-76669f99c-2gvcj\" (UID: \"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.017452 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xvnl\" (UniqueName: \"kubernetes.io/projected/ae8ee517-97d6-422e-a058-c229d111e654-kube-api-access-5xvnl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-28kqj\" (UID: \"ae8ee517-97d6-422e-a058-c229d111e654\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.039418 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7pv5\" (UniqueName: \"kubernetes.io/projected/3a536984-9465-496f-9cfb-f48e32bd0c1b-kube-api-access-m7pv5\") pod \"swift-operator-controller-manager-bc7dc7bd9-z7l94\" (UID: \"3a536984-9465-496f-9cfb-f48e32bd0c1b\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.048933 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.067279 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cwhr\" (UniqueName: \"kubernetes.io/projected/c1669984-9655-488e-a243-0a48f9e381c1-kube-api-access-4cwhr\") pod \"test-operator-controller-manager-f66b554c6-h8j45\" (UID: \"c1669984-9655-488e-a243-0a48f9e381c1\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.067893 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n4fz\" (UniqueName: \"kubernetes.io/projected/e262072a-1f18-48fa-a2af-73466cc9a40b-kube-api-access-9n4fz\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fbmst\" (UID: \"e262072a-1f18-48fa-a2af-73466cc9a40b\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.074936 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp94j\" (UniqueName: \"kubernetes.io/projected/58ddfa7e-b740-4d7d-ba1e-22d3c81a5870-kube-api-access-rp94j\") pod \"watcher-operator-controller-manager-76669f99c-2gvcj\" (UID: \"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.081675 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48hng\" (UniqueName: \"kubernetes.io/projected/e0835976-81c4-4f6f-aad4-0af0341168e2-kube-api-access-48hng\") pod \"placement-operator-controller-manager-589c58c6c-dqmkb\" (UID: \"e0835976-81c4-4f6f-aad4-0af0341168e2\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.082220 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.166335 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.178241 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"]
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.179491 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.183196 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-n9r9d" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.183492 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.202749 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"] Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.277711 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x"] Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.278746 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.285401 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.292770 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x"] Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.298221 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-j4sd6" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.301890 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64gm2\" (UniqueName: \"kubernetes.io/projected/0ef59238-520a-4221-8a49-40a4e1a1049d-kube-api-access-64gm2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x\" (UID: \"0ef59238-520a-4221-8a49-40a4e1a1049d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.301963 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.302010 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw8ml\" (UniqueName: \"kubernetes.io/projected/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-kube-api-access-fw8ml\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.302031 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-cert\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " 
pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:14 crc kubenswrapper[4603]: E0930 20:02:14.302179 4603 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:02:14 crc kubenswrapper[4603]: E0930 20:02:14.302222 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert podName:e6858f69-2a71-4459-89d4-59939c74b778 nodeName:}" failed. No retries permitted until 2025-09-30 20:02:15.302207625 +0000 UTC m=+937.240666443 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-gcb85" (UID: "e6858f69-2a71-4459-89d4-59939c74b778") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.302846 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.312731 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.355550 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.404481 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw8ml\" (UniqueName: \"kubernetes.io/projected/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-kube-api-access-fw8ml\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.404551 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-cert\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.404629 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64gm2\" (UniqueName: \"kubernetes.io/projected/0ef59238-520a-4221-8a49-40a4e1a1049d-kube-api-access-64gm2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x\" (UID: \"0ef59238-520a-4221-8a49-40a4e1a1049d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" Sep 30 20:02:14 crc kubenswrapper[4603]: E0930 20:02:14.406069 4603 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 20:02:14 crc kubenswrapper[4603]: E0930 20:02:14.406122 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-cert podName:7c79bcd3-52f0-4d6b-8814-65ccfe3e9577 nodeName:}" failed. No retries permitted until 2025-09-30 20:02:14.906106931 +0000 UTC m=+936.844565749 (durationBeforeRetry 500ms). 
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.428973 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64gm2\" (UniqueName: \"kubernetes.io/projected/0ef59238-520a-4221-8a49-40a4e1a1049d-kube-api-access-64gm2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x\" (UID: \"0ef59238-520a-4221-8a49-40a4e1a1049d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.442750 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw8ml\" (UniqueName: \"kubernetes.io/projected/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-kube-api-access-fw8ml\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.569278 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn"]
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.713598 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.758415 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr"]
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.914210 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-cert\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.914273 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.920744 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7c79bcd3-52f0-4d6b-8814-65ccfe3e9577-cert\") pod \"openstack-operator-controller-manager-78964744f9-tbqf5\" (UID: \"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577\") " pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.922645 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e0158f35-7f0f-4c77-b761-6b624fc675f0-cert\") pod \"infra-operator-controller-manager-9d6c5db85-pszrb\" (UID: \"e0158f35-7f0f-4c77-b761-6b624fc675f0\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.947957 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5"]
Sep 30 20:02:14 crc kubenswrapper[4603]: W0930 20:02:14.958554 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac8d681e_168c_401e_9529_54098a214435.slice/crio-033f74b279d2f20d8fb0e4b6353929be5f7b4cec5953fdd0fa724444acb660dd WatchSource:0}: Error finding container 033f74b279d2f20d8fb0e4b6353929be5f7b4cec5953fdd0fa724444acb660dd: Status 404 returned error can't find the container with id 033f74b279d2f20d8fb0e4b6353929be5f7b4cec5953fdd0fa724444acb660dd
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.961214 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j"]
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.969021 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.981445 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms"]
Sep 30 20:02:14 crc kubenswrapper[4603]: W0930 20:02:14.986424 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e6da4be_f92f_48ee_85e4_f316da7f6e27.slice/crio-6c92f2a02a83957c3d7e97a91a51b00b9d231975f0f426fc7f0c9f91a966a89b WatchSource:0}: Error finding container 6c92f2a02a83957c3d7e97a91a51b00b9d231975f0f426fc7f0c9f91a966a89b: Status 404 returned error can't find the container with id 6c92f2a02a83957c3d7e97a91a51b00b9d231975f0f426fc7f0c9f91a966a89b
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.989607 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq"]
Sep 30 20:02:14 crc kubenswrapper[4603]: I0930 20:02:14.997233 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7"]
Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.119917 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.167965 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.185640 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.193783 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj"] Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.200306 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod961055da_fa39_4301_b30d_f0a61d41371a.slice/crio-ca4397582d0e2b6acc34244dbcfebaf8ebc1435a08e46ffa4c73c3218b04d09a WatchSource:0}: Error finding container ca4397582d0e2b6acc34244dbcfebaf8ebc1435a08e46ffa4c73c3218b04d09a: Status 404 returned error can't find the container with id ca4397582d0e2b6acc34244dbcfebaf8ebc1435a08e46ffa4c73c3218b04d09a Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.204591 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-fd845"] Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.209452 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45381319_4688_4802_a937_e804b3d0e6b1.slice/crio-27719dbfeb31d336f69b0c498409629e66f3bc3b2a9dfb0286d62a1088b73904 WatchSource:0}: Error finding container 27719dbfeb31d336f69b0c498409629e66f3bc3b2a9dfb0286d62a1088b73904: Status 404 returned error can't find the container with id 27719dbfeb31d336f69b0c498409629e66f3bc3b2a9dfb0286d62a1088b73904 Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.211161 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.217200 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst"] Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.218792 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode262072a_1f18_48fa_a2af_73466cc9a40b.slice/crio-981bb70b25639824dd7a0297fd1f239374b1e9aec7e37fa25f05e85b22848b49 WatchSource:0}: Error finding container 981bb70b25639824dd7a0297fd1f239374b1e9aec7e37fa25f05e85b22848b49: Status 404 returned error can't find the container with id 981bb70b25639824dd7a0297fd1f239374b1e9aec7e37fa25f05e85b22848b49 Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.238745 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae8ee517_97d6_422e_a058_c229d111e654.slice/crio-2895948cf600c7f28677a8b2713d95c10b43d65c1d94f03f81731dd8d8f91e7b WatchSource:0}: Error finding container 2895948cf600c7f28677a8b2713d95c10b43d65c1d94f03f81731dd8d8f91e7b: Status 404 returned error can't find the container with id 2895948cf600c7f28677a8b2713d95c10b43d65c1d94f03f81731dd8d8f91e7b Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.321939 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.346017 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6858f69-2a71-4459-89d4-59939c74b778-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-gcb85\" (UID: \"e6858f69-2a71-4459-89d4-59939c74b778\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.372187 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" event={"ID":"961055da-fa39-4301-b30d-f0a61d41371a","Type":"ContainerStarted","Data":"ca4397582d0e2b6acc34244dbcfebaf8ebc1435a08e46ffa4c73c3218b04d09a"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.374262 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" event={"ID":"e17e463e-0a04-457a-a014-480772f91871","Type":"ContainerStarted","Data":"d0595fc15475a555e2158b331fca4e906d4afadbcb4ed7baad1e7eee7a2677f7"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.376393 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" event={"ID":"3fb36813-9cc2-4668-ad3a-da10b9594f8a","Type":"ContainerStarted","Data":"3d8577c505dd3bb9d0fbb928f63fc46bb01731cfeda328ad48ed9e7b229a7c42"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.377469 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" event={"ID":"17b3f01d-c7ac-4b96-a90b-02c645fa27ed","Type":"ContainerStarted","Data":"addb78a7dc33327511d5aa8222934b88e30f480e1520f07a252ff27791808138"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.379514 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" event={"ID":"9caa0cb4-2c14-430d-ac4a-942c78ec844e","Type":"ContainerStarted","Data":"eedf726c557cc45f26dfe8e987a3fc35b11417c8efbbb6792c6f877bcbdd7e7d"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.382047 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" event={"ID":"63389a19-bdd5-4862-a0b0-f93a5df19823","Type":"ContainerStarted","Data":"14ba20820e658c20ca32e545e37cdaf8edbdf220137437901a1ee73422f97428"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.382830 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" event={"ID":"e262072a-1f18-48fa-a2af-73466cc9a40b","Type":"ContainerStarted","Data":"981bb70b25639824dd7a0297fd1f239374b1e9aec7e37fa25f05e85b22848b49"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.383567 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" 
event={"ID":"ae8ee517-97d6-422e-a058-c229d111e654","Type":"ContainerStarted","Data":"2895948cf600c7f28677a8b2713d95c10b43d65c1d94f03f81731dd8d8f91e7b"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.386290 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" event={"ID":"45381319-4688-4802-a937-e804b3d0e6b1","Type":"ContainerStarted","Data":"27719dbfeb31d336f69b0c498409629e66f3bc3b2a9dfb0286d62a1088b73904"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.387325 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" event={"ID":"5b507d22-1613-4e76-948f-e4d55f160473","Type":"ContainerStarted","Data":"51c80a2239816884b82ab2329ff574f5dc3cd7d31f4a4b8ff312c3065124d1a7"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.390995 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" event={"ID":"ac8d681e-168c-401e-9529-54098a214435","Type":"ContainerStarted","Data":"033f74b279d2f20d8fb0e4b6353929be5f7b4cec5953fdd0fa724444acb660dd"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.396733 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" event={"ID":"3e6da4be-f92f-48ee-85e4-f316da7f6e27","Type":"ContainerStarted","Data":"6c92f2a02a83957c3d7e97a91a51b00b9d231975f0f426fc7f0c9f91a966a89b"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.398029 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" event={"ID":"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2","Type":"ContainerStarted","Data":"538031e695cfaa3b8ec55997bfd85a1c539785ec5c1a66f4c99d3855c3a24006"} Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.434648 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.558680 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-h8j45"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.575451 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.586883 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.606765 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.613297 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x"] Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.619198 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4cwhr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-h8j45_openstack-operators(c1669984-9655-488e-a243-0a48f9e381c1): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.630391 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a536984_9465_496f_9cfb_f48e32bd0c1b.slice/crio-9a26ed39c6fc160baa6b20bd42dcdac0090697b89d94e84b68235fa4586e3d7d WatchSource:0}: Error finding container 9a26ed39c6fc160baa6b20bd42dcdac0090697b89d94e84b68235fa4586e3d7d: Status 404 returned error can't find the container with id 9a26ed39c6fc160baa6b20bd42dcdac0090697b89d94e84b68235fa4586e3d7d Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.634353 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb"] Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.640695 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5"] Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.664946 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-48hng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-dqmkb_openstack-operators(e0835976-81c4-4f6f-aad4-0af0341168e2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.665568 4603 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m7pv5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-z7l94_openstack-operators(3a536984-9465-496f-9cfb-f48e32bd0c1b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.673127 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb"] Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.693028 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c79bcd3_52f0_4d6b_8814_65ccfe3e9577.slice/crio-a19a8ca1dc931140d0583a67b05a724c34cd10c58657f727e438614873d1edee WatchSource:0}: Error finding container a19a8ca1dc931140d0583a67b05a724c34cd10c58657f727e438614873d1edee: Status 404 returned error can't find the container with id a19a8ca1dc931140d0583a67b05a724c34cd10c58657f727e438614873d1edee Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.708315 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5"] Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.712556 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-64gm2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x_openstack-operators(0ef59238-520a-4221-8a49-40a4e1a1049d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.722682 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lpm92,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-6l5w5_openstack-operators(3a7017fa-c8d0-493d-a338-ec3d2626a289): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.722797 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s26fq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.723070 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" podUID="0ef59238-520a-4221-8a49-40a4e1a1049d"
Sep 30 20:02:15 crc kubenswrapper[4603]: I0930 20:02:15.729674 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85"]
Sep 30 20:02:15 crc kubenswrapper[4603]: W0930 20:02:15.742105 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6858f69_2a71_4459_89d4_59939c74b778.slice/crio-bfd152d98b7d2503f66dea4ab1ee992e056011cacf0dda8bbdc561a9c239faf2 WatchSource:0}: Error finding container bfd152d98b7d2503f66dea4ab1ee992e056011cacf0dda8bbdc561a9c239faf2: Status 404 returned error can't find the container with id bfd152d98b7d2503f66dea4ab1ee992e056011cacf0dda8bbdc561a9c239faf2
Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.778715 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},
EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},Env
Var{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2jsrh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6d776955-gcb85_openstack-operators(e6858f69-2a71-4459-89d4-59939c74b778): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Sep 30 20:02:15 crc kubenswrapper[4603]: E0930 20:02:15.932834 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" podUID="c1669984-9655-488e-a243-0a48f9e381c1" Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.027977 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" podUID="3a536984-9465-496f-9cfb-f48e32bd0c1b" Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.105096 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" podUID="e0835976-81c4-4f6f-aad4-0af0341168e2" Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.105525 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podUID="3a7017fa-c8d0-493d-a338-ec3d2626a289" Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.168279 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" podUID="e6858f69-2a71-4459-89d4-59939c74b778" Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.206573 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" podUID="e0158f35-7f0f-4c77-b761-6b624fc675f0" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.420879 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" event={"ID":"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577","Type":"ContainerStarted","Data":"cc417507e82f7d070bed7431b106b0bacef0a04e64aa83188abb6aab9e419572"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.421237 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" event={"ID":"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577","Type":"ContainerStarted","Data":"a6151677f5f98461b0c7b779bc708e703da599d0c68c121a58b9f40e3d1f1da7"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.421266 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" event={"ID":"7c79bcd3-52f0-4d6b-8814-65ccfe3e9577","Type":"ContainerStarted","Data":"a19a8ca1dc931140d0583a67b05a724c34cd10c58657f727e438614873d1edee"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.422617 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.448007 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" 
event={"ID":"e6858f69-2a71-4459-89d4-59939c74b778","Type":"ContainerStarted","Data":"8f125b77e95fdd0d19c43672493dcd8e24c2e3f78cde1bc90748b42913fad396"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.448063 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" event={"ID":"e6858f69-2a71-4459-89d4-59939c74b778","Type":"ContainerStarted","Data":"bfd152d98b7d2503f66dea4ab1ee992e056011cacf0dda8bbdc561a9c239faf2"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.449871 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" podUID="e6858f69-2a71-4459-89d4-59939c74b778" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.455040 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" event={"ID":"0ef59238-520a-4221-8a49-40a4e1a1049d","Type":"ContainerStarted","Data":"d44d9f3c367a7a8929977fe6e3e4aede5fdebe58a75450f7732696959c251bf1"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.460401 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" podUID="0ef59238-520a-4221-8a49-40a4e1a1049d" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.460924 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" podStartSLOduration=3.46089482 podStartE2EDuration="3.46089482s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:02:16.460098158 +0000 UTC m=+938.398556976" watchObservedRunningTime="2025-09-30 20:02:16.46089482 +0000 UTC m=+938.399353638" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.517345 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" event={"ID":"e0835976-81c4-4f6f-aad4-0af0341168e2","Type":"ContainerStarted","Data":"eb875d798121a4b49b3e19c8d8350870fe4aa3c0a8d20b5eb234af579c6e5afc"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.517382 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" event={"ID":"e0835976-81c4-4f6f-aad4-0af0341168e2","Type":"ContainerStarted","Data":"574f4fb4b24ee2f72720987b5b8df3bcc01a26b1831735555d98e6ac380709a2"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.518500 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" 
pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" podUID="e0835976-81c4-4f6f-aad4-0af0341168e2" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.520749 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" event={"ID":"453ca8a5-9f93-4ad9-a0ef-14858d949b08","Type":"ContainerStarted","Data":"9f00ef7617acd6c700c1cfddf99006747766dac50ba0cbbe7a57b56e7e480f6c"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.528703 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" event={"ID":"3a536984-9465-496f-9cfb-f48e32bd0c1b","Type":"ContainerStarted","Data":"ab4ae336981549b22ab973e90904c7d27a67d305bee5c98fd6de7875f6cc13b9"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.528746 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" event={"ID":"3a536984-9465-496f-9cfb-f48e32bd0c1b","Type":"ContainerStarted","Data":"9a26ed39c6fc160baa6b20bd42dcdac0090697b89d94e84b68235fa4586e3d7d"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.531493 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" podUID="3a536984-9465-496f-9cfb-f48e32bd0c1b" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.533125 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" event={"ID":"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870","Type":"ContainerStarted","Data":"5a66dbebcbdc9c0babec07e4f70b5eb9c10c711af457d3b62fd111ed8de419fd"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.541599 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" event={"ID":"c1669984-9655-488e-a243-0a48f9e381c1","Type":"ContainerStarted","Data":"3fdc9938fbb68e545b73e53d0d1e58db93e653def195daf6006e2cd3f0a56f80"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.541642 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" event={"ID":"c1669984-9655-488e-a243-0a48f9e381c1","Type":"ContainerStarted","Data":"210052fdcbf9f3ce68381c9660bb640b28641cccb6f9a3665428c65d68436f32"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.556385 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" podUID="c1669984-9655-488e-a243-0a48f9e381c1" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.578448 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" event={"ID":"3a7017fa-c8d0-493d-a338-ec3d2626a289","Type":"ContainerStarted","Data":"1e4c77a1a1117036aa1413b76b7f6f0694b91ce0cf34dd148460f8b945d3b14f"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.578482 4603 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" event={"ID":"3a7017fa-c8d0-493d-a338-ec3d2626a289","Type":"ContainerStarted","Data":"4cf153f6f9b6641ce717694c47a16985f8c94e54954b8039fc8e5ffb1b0675c7"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.586333 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podUID="3a7017fa-c8d0-493d-a338-ec3d2626a289" Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.607004 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" event={"ID":"e0158f35-7f0f-4c77-b761-6b624fc675f0","Type":"ContainerStarted","Data":"b537d4bacca8ca0a566055707585afad1e252ed46753c1d1899997f5acb15e3b"} Sep 30 20:02:16 crc kubenswrapper[4603]: I0930 20:02:16.607053 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" event={"ID":"e0158f35-7f0f-4c77-b761-6b624fc675f0","Type":"ContainerStarted","Data":"90124c26ed635cfff539bad25e2b37a2f68e46e246aebc3ddadca4750b747292"} Sep 30 20:02:16 crc kubenswrapper[4603]: E0930 20:02:16.618729 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" podUID="e0158f35-7f0f-4c77-b761-6b624fc675f0" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.631674 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" podUID="0ef59238-520a-4221-8a49-40a4e1a1049d" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.632031 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" podUID="3a536984-9465-496f-9cfb-f48e32bd0c1b" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.632665 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" podUID="e0835976-81c4-4f6f-aad4-0af0341168e2" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.632703 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" podUID="e6858f69-2a71-4459-89d4-59939c74b778" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.632750 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" podUID="c1669984-9655-488e-a243-0a48f9e381c1" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.633616 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podUID="3a7017fa-c8d0-493d-a338-ec3d2626a289" Sep 30 20:02:17 crc kubenswrapper[4603]: E0930 20:02:17.633703 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" podUID="e0158f35-7f0f-4c77-b761-6b624fc675f0" Sep 30 20:02:25 crc kubenswrapper[4603]: I0930 20:02:25.127273 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-78964744f9-tbqf5" Sep 30 20:02:42 crc kubenswrapper[4603]: E0930 20:02:42.189087 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f" Sep 30 20:02:42 crc kubenswrapper[4603]: E0930 20:02:42.189781 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5xvnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-28kqj_openstack-operators(ae8ee517-97d6-422e-a058-c229d111e654): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:02:42 crc kubenswrapper[4603]: E0930 20:02:42.362125 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6" Sep 30 20:02:42 crc kubenswrapper[4603]: E0930 20:02:42.362356 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sblwg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-7975b88857-n2qf4_openstack-operators(5b507d22-1613-4e76-948f-e4d55f160473): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:02:43 crc kubenswrapper[4603]: E0930 20:02:43.243079 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10" Sep 30 20:02:43 crc kubenswrapper[4603]: E0930 20:02:43.243500 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hjq6x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-64d7b59854-6cvcn_openstack-operators(45381319-4688-4802-a937-e804b3d0e6b1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:02:44 crc kubenswrapper[4603]: E0930 20:02:44.114844 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" podUID="ae8ee517-97d6-422e-a058-c229d111e654" Sep 30 20:02:44 crc kubenswrapper[4603]: I0930 20:02:44.852753 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" event={"ID":"ae8ee517-97d6-422e-a058-c229d111e654","Type":"ContainerStarted","Data":"e09e686d749570a33e739d44313d8381a78f905feaac12f084de028cb72747b3"} Sep 30 20:02:44 crc kubenswrapper[4603]: E0930 20:02:44.855051 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" podUID="ae8ee517-97d6-422e-a058-c229d111e654" Sep 30 20:02:45 crc kubenswrapper[4603]: E0930 20:02:45.869199 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" podUID="ae8ee517-97d6-422e-a058-c229d111e654" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.295530 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.295982 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s26fq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-9d6c5db85-pszrb_openstack-operators(e0158f35-7f0f-4c77-b761-6b624fc675f0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.297157 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" podUID="e0158f35-7f0f-4c77-b761-6b624fc675f0" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.909104 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.910075 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lpm92,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-6l5w5_openstack-operators(3a7017fa-c8d0-493d-a338-ec3d2626a289): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:02:54 crc kubenswrapper[4603]: E0930 20:02:54.911400 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podUID="3a7017fa-c8d0-493d-a338-ec3d2626a289" Sep 30 20:02:55 crc kubenswrapper[4603]: E0930 20:02:55.151685 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" podUID="45381319-4688-4802-a937-e804b3d0e6b1" Sep 30 20:02:55 crc kubenswrapper[4603]: E0930 20:02:55.162752 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" podUID="5b507d22-1613-4e76-948f-e4d55f160473" Sep 30 20:02:55 crc kubenswrapper[4603]: I0930 20:02:55.993194 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" 
event={"ID":"e17e463e-0a04-457a-a014-480772f91871","Type":"ContainerStarted","Data":"d80c28ddee8575f23a04c14deb9d5af08d4d1338c41aa4aa1294a2b129cb9fb0"} Sep 30 20:02:55 crc kubenswrapper[4603]: I0930 20:02:55.994890 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" event={"ID":"453ca8a5-9f93-4ad9-a0ef-14858d949b08","Type":"ContainerStarted","Data":"e1b151c4ea1ee767120270841e9581b0cc0a1f20042a3a1ca5cafcc90992a053"} Sep 30 20:02:55 crc kubenswrapper[4603]: I0930 20:02:55.996718 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" event={"ID":"e262072a-1f18-48fa-a2af-73466cc9a40b","Type":"ContainerStarted","Data":"b0a05b94e956ec0e9c35404cc3084a120dd75a5c68f71e376af323c42f939332"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.000381 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" event={"ID":"5b507d22-1613-4e76-948f-e4d55f160473","Type":"ContainerStarted","Data":"92f0950d35e3807444f1dc9366fb47c9fc3c2501652f6ce97ae15e67667e0db9"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.002528 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.025077 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" event={"ID":"e6858f69-2a71-4459-89d4-59939c74b778","Type":"ContainerStarted","Data":"32203970f6e74cd6b46433081794db704b7642768447619a8de6cf528430e6ed"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.030825 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" event={"ID":"17b3f01d-c7ac-4b96-a90b-02c645fa27ed","Type":"ContainerStarted","Data":"01d299991295a9d500389605e8130794c2fe5676b89f1cb52b4b8d505321e707"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.042643 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" event={"ID":"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2","Type":"ContainerStarted","Data":"a7acc6bdcefb064f1992eda7a7e0cabea4a0e3f8a218f626dfc7d75639fb0c15"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.053795 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" event={"ID":"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870","Type":"ContainerStarted","Data":"3496dd841e60c5d413c4d816cf546b82d315c49c9d79d1acec3643551d519838"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.068442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" event={"ID":"3fb36813-9cc2-4668-ad3a-da10b9594f8a","Type":"ContainerStarted","Data":"6cd89674b4df11726d16b80a44d8c18f39c0b14c90654dce7bb232df89712696"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.080535 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" event={"ID":"63389a19-bdd5-4862-a0b0-f93a5df19823","Type":"ContainerStarted","Data":"d0bcc8d913c32db5d019112c79d19433b57b04af7a50a059e7ef2e3008d41538"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.097951 4603 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" event={"ID":"45381319-4688-4802-a937-e804b3d0e6b1","Type":"ContainerStarted","Data":"e46599656edb0ad1f75ac1412ccd74e873cbc5194efb1b0bc296311c5643f0b8"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.105751 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" event={"ID":"961055da-fa39-4301-b30d-f0a61d41371a","Type":"ContainerStarted","Data":"36cb8709c409c6967edc75d9646ba905f006c857d7cf7a83700273a4ffe9f588"} Sep 30 20:02:56 crc kubenswrapper[4603]: I0930 20:02:56.123047 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" event={"ID":"3e6da4be-f92f-48ee-85e4-f316da7f6e27","Type":"ContainerStarted","Data":"1bf91f3a827336422a01108c78cd0935c43c49bbb999b140eed5b216115b5b7d"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.130929 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" event={"ID":"3a536984-9465-496f-9cfb-f48e32bd0c1b","Type":"ContainerStarted","Data":"86dd990a10a0831385c6ce8c6d75abb7b0b013a5baf59447694536f1534da0fc"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.132941 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" event={"ID":"ac8d681e-168c-401e-9529-54098a214435","Type":"ContainerStarted","Data":"9e0c4bca7823b911fa21d1b8751c04d62e4bea9451f31ed4188f629a92137232"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.134182 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" event={"ID":"0ef59238-520a-4221-8a49-40a4e1a1049d","Type":"ContainerStarted","Data":"b9ffcde1553d4f1dd659147d3efae91eb91a8340e8a63d094b664f7987dd2c56"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.135754 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" event={"ID":"c1669984-9655-488e-a243-0a48f9e381c1","Type":"ContainerStarted","Data":"d172c7191af93b17b9cef642c6ee0107d22ae8dd23f42b8a4582e74862fb9b4c"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.137467 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" event={"ID":"e0835976-81c4-4f6f-aad4-0af0341168e2","Type":"ContainerStarted","Data":"56f39894a92c3380feb22288c0f395c1c7d49259f1a01d2ba9f6bf511658c775"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.138898 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" event={"ID":"9caa0cb4-2c14-430d-ac4a-942c78ec844e","Type":"ContainerStarted","Data":"0f7e44a711876cca802f02b89795cc798c0343725f1e54b8c3a691337cbf06df"} Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.139055 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:02:57 crc kubenswrapper[4603]: I0930 20:02:57.175040 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" podStartSLOduration=4.914840178 podStartE2EDuration="44.175025538s" 
podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.776741749 +0000 UTC m=+937.715200567" lastFinishedPulling="2025-09-30 20:02:55.036927099 +0000 UTC m=+976.975385927" observedRunningTime="2025-09-30 20:02:57.171612545 +0000 UTC m=+979.110071383" watchObservedRunningTime="2025-09-30 20:02:57.175025538 +0000 UTC m=+979.113484356" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.146892 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" event={"ID":"9caa0cb4-2c14-430d-ac4a-942c78ec844e","Type":"ContainerStarted","Data":"91a1dff8cd13b65bec10ea8f3b41ce34c03eb5932cab455e240f3905b3014577"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.148894 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" event={"ID":"63389a19-bdd5-4862-a0b0-f93a5df19823","Type":"ContainerStarted","Data":"3373a7057a3b091f6789d373d2d436610bd128d4bc00e0c2d47a33506e7577b2"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.149025 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.150179 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" event={"ID":"3e6da4be-f92f-48ee-85e4-f316da7f6e27","Type":"ContainerStarted","Data":"2a55a06f24b64edc009b59193f7b2b17302e3ad9e257eac40c094459be8c7d05"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.150388 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.151724 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" event={"ID":"b9de699a-42fd-40f8-94e3-ccddd9f2e6c2","Type":"ContainerStarted","Data":"c7613f7fb3308240af8bc0a81f3de657beed3305939e1134fa0c7f838a106d04"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.151852 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.153090 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" event={"ID":"961055da-fa39-4301-b30d-f0a61d41371a","Type":"ContainerStarted","Data":"12f4535c2cd67ff1ab00271beb9ba1ef9cb8994c0e91c1b0eb3ffbbb9766f88b"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.153208 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.154757 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" event={"ID":"e17e463e-0a04-457a-a014-480772f91871","Type":"ContainerStarted","Data":"6b3afdec0a6203593a40de0b359d6b11afcc9bd5e91bacedd2ac30985c3ac98e"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.154790 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:02:58 crc 
kubenswrapper[4603]: I0930 20:02:58.156175 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" event={"ID":"ac8d681e-168c-401e-9529-54098a214435","Type":"ContainerStarted","Data":"7c011c34266576d77e2bcc59898e8b1361db68dd7f04c3632af572200caa8bab"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.157307 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" event={"ID":"58ddfa7e-b740-4d7d-ba1e-22d3c81a5870","Type":"ContainerStarted","Data":"d7cff4102a432ed5dc3984fa20804c85930cd8684575c0e5d006584121ab7eaf"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.157479 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.158601 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" event={"ID":"3fb36813-9cc2-4668-ad3a-da10b9594f8a","Type":"ContainerStarted","Data":"d2e1db631479fd05d3518b12a6b5b9eecfd9a01fc6ab171bf3ed1266e7fa4651"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.159057 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.160292 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" event={"ID":"453ca8a5-9f93-4ad9-a0ef-14858d949b08","Type":"ContainerStarted","Data":"10181c11890519620ae2428fb9a5ceeb263e9393a0daa526b3bc871f26b5d17b"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.160722 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.165120 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" event={"ID":"e262072a-1f18-48fa-a2af-73466cc9a40b","Type":"ContainerStarted","Data":"9a53a5193415388a59b1bf7436290b572526a7ea1f1cdb93837760b0ad9ec0ab"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.165284 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.170028 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" podStartSLOduration=16.085426768 podStartE2EDuration="46.170014198s" podCreationTimestamp="2025-09-30 20:02:12 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.99398139 +0000 UTC m=+936.932440198" lastFinishedPulling="2025-09-30 20:02:45.07856881 +0000 UTC m=+967.017027628" observedRunningTime="2025-09-30 20:02:58.168735832 +0000 UTC m=+980.107194640" watchObservedRunningTime="2025-09-30 20:02:58.170014198 +0000 UTC m=+980.108473016" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.176201 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" 
event={"ID":"17b3f01d-c7ac-4b96-a90b-02c645fa27ed","Type":"ContainerStarted","Data":"596fef2bd782df8b4b379ab252ce3b5f1e9e639a4b900f76fa7bf56b3b257883"} Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.176731 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.177808 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.194346 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" podStartSLOduration=17.266158323 podStartE2EDuration="46.194323907s" podCreationTimestamp="2025-09-30 20:02:12 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.993881427 +0000 UTC m=+936.932340235" lastFinishedPulling="2025-09-30 20:02:43.922047001 +0000 UTC m=+965.860505819" observedRunningTime="2025-09-30 20:02:58.190107551 +0000 UTC m=+980.128566369" watchObservedRunningTime="2025-09-30 20:02:58.194323907 +0000 UTC m=+980.132782735" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.203815 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" podStartSLOduration=8.74995608 podStartE2EDuration="45.203794109s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.576627732 +0000 UTC m=+937.515086550" lastFinishedPulling="2025-09-30 20:02:52.030465761 +0000 UTC m=+973.968924579" observedRunningTime="2025-09-30 20:02:58.203420908 +0000 UTC m=+980.141879726" watchObservedRunningTime="2025-09-30 20:02:58.203794109 +0000 UTC m=+980.142252937" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.220060 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" podStartSLOduration=16.522168073 podStartE2EDuration="45.220040996s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.221881593 +0000 UTC m=+937.160340441" lastFinishedPulling="2025-09-30 20:02:43.919754546 +0000 UTC m=+965.858213364" observedRunningTime="2025-09-30 20:02:58.218379201 +0000 UTC m=+980.156838009" watchObservedRunningTime="2025-09-30 20:02:58.220040996 +0000 UTC m=+980.158499834" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.237194 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" podStartSLOduration=9.548572440000001 podStartE2EDuration="46.237178909s" podCreationTimestamp="2025-09-30 20:02:12 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.64529571 +0000 UTC m=+936.583754528" lastFinishedPulling="2025-09-30 20:02:51.333902179 +0000 UTC m=+973.272360997" observedRunningTime="2025-09-30 20:02:58.233468456 +0000 UTC m=+980.171927274" watchObservedRunningTime="2025-09-30 20:02:58.237178909 +0000 UTC m=+980.175637727" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.256893 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" podStartSLOduration=16.054746668 podStartE2EDuration="45.256876242s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" 
firstStartedPulling="2025-09-30 20:02:15.251295395 +0000 UTC m=+937.189754213" lastFinishedPulling="2025-09-30 20:02:44.453424969 +0000 UTC m=+966.391883787" observedRunningTime="2025-09-30 20:02:58.252675276 +0000 UTC m=+980.191134094" watchObservedRunningTime="2025-09-30 20:02:58.256876242 +0000 UTC m=+980.195335060" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.288592 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" podStartSLOduration=16.840765795 podStartE2EDuration="45.288577896s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.812140735 +0000 UTC m=+936.750599553" lastFinishedPulling="2025-09-30 20:02:43.259952846 +0000 UTC m=+965.198411654" observedRunningTime="2025-09-30 20:02:58.274760375 +0000 UTC m=+980.213219183" watchObservedRunningTime="2025-09-30 20:02:58.288577896 +0000 UTC m=+980.227036704" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.290745 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" podStartSLOduration=9.617715855 podStartE2EDuration="45.290739506s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.661084614 +0000 UTC m=+937.599543432" lastFinishedPulling="2025-09-30 20:02:51.334108225 +0000 UTC m=+973.272567083" observedRunningTime="2025-09-30 20:02:58.286059806 +0000 UTC m=+980.224518624" watchObservedRunningTime="2025-09-30 20:02:58.290739506 +0000 UTC m=+980.229198324" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.303896 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" podStartSLOduration=16.841139825 podStartE2EDuration="46.303887237s" podCreationTimestamp="2025-09-30 20:02:12 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.988493836 +0000 UTC m=+936.926952654" lastFinishedPulling="2025-09-30 20:02:44.451241248 +0000 UTC m=+966.389700066" observedRunningTime="2025-09-30 20:02:58.302731476 +0000 UTC m=+980.241190304" watchObservedRunningTime="2025-09-30 20:02:58.303887237 +0000 UTC m=+980.242346065" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.324139 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" podStartSLOduration=9.192506562 podStartE2EDuration="45.324125466s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.202718927 +0000 UTC m=+937.141177745" lastFinishedPulling="2025-09-30 20:02:51.334337791 +0000 UTC m=+973.272796649" observedRunningTime="2025-09-30 20:02:58.320079634 +0000 UTC m=+980.258538452" watchObservedRunningTime="2025-09-30 20:02:58.324125466 +0000 UTC m=+980.262584284" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.337224 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x" podStartSLOduration=5.00684849 podStartE2EDuration="44.337215246s" podCreationTimestamp="2025-09-30 20:02:14 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.675224139 +0000 UTC m=+937.613682957" lastFinishedPulling="2025-09-30 20:02:55.005590885 +0000 UTC m=+976.944049713" observedRunningTime="2025-09-30 20:02:58.334266685 +0000 UTC m=+980.272725503" watchObservedRunningTime="2025-09-30 
20:02:58.337215246 +0000 UTC m=+980.275674064" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.362870 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" podStartSLOduration=6.014075617 podStartE2EDuration="45.362857483s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.619051318 +0000 UTC m=+937.557510126" lastFinishedPulling="2025-09-30 20:02:54.967833174 +0000 UTC m=+976.906291992" observedRunningTime="2025-09-30 20:02:58.355445529 +0000 UTC m=+980.293904357" watchObservedRunningTime="2025-09-30 20:02:58.362857483 +0000 UTC m=+980.301316311" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.381202 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" podStartSLOduration=6.036396781 podStartE2EDuration="45.381188429s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.665304902 +0000 UTC m=+937.603763710" lastFinishedPulling="2025-09-30 20:02:55.01009653 +0000 UTC m=+976.948555358" observedRunningTime="2025-09-30 20:02:58.380641174 +0000 UTC m=+980.319099992" watchObservedRunningTime="2025-09-30 20:02:58.381188429 +0000 UTC m=+980.319647267" Sep 30 20:02:58 crc kubenswrapper[4603]: I0930 20:02:58.399567 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" podStartSLOduration=6.083977862 podStartE2EDuration="45.399553694s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.664442579 +0000 UTC m=+937.602901397" lastFinishedPulling="2025-09-30 20:02:54.980018411 +0000 UTC m=+976.918477229" observedRunningTime="2025-09-30 20:02:58.39358324 +0000 UTC m=+980.332042068" watchObservedRunningTime="2025-09-30 20:02:58.399553694 +0000 UTC m=+980.338012522" Sep 30 20:02:59 crc kubenswrapper[4603]: I0930 20:02:59.221442 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" podStartSLOduration=18.274008329 podStartE2EDuration="47.22139024s" podCreationTimestamp="2025-09-30 20:02:12 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.968826906 +0000 UTC m=+936.907285724" lastFinishedPulling="2025-09-30 20:02:43.916208817 +0000 UTC m=+965.854667635" observedRunningTime="2025-09-30 20:02:59.207816826 +0000 UTC m=+981.146275664" watchObservedRunningTime="2025-09-30 20:02:59.22139024 +0000 UTC m=+981.159849098" Sep 30 20:02:59 crc kubenswrapper[4603]: I0930 20:02:59.239818 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" podStartSLOduration=16.769755171 podStartE2EDuration="46.239794677s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:14.982643113 +0000 UTC m=+936.921101931" lastFinishedPulling="2025-09-30 20:02:44.452682609 +0000 UTC m=+966.391141437" observedRunningTime="2025-09-30 20:02:59.233496294 +0000 UTC m=+981.171955132" watchObservedRunningTime="2025-09-30 20:02:59.239794677 +0000 UTC m=+981.178253515" Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.196710 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" 
event={"ID":"45381319-4688-4802-a937-e804b3d0e6b1","Type":"ContainerStarted","Data":"bbdac2c90707bf5ccac95602413c409b830e7bc80e2cf2e02896d5f98616ed10"} Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.197075 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.198683 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" event={"ID":"5b507d22-1613-4e76-948f-e4d55f160473","Type":"ContainerStarted","Data":"fc3efbb57b51e08afd0ef15f7b00af4bd3847d4708044a6d7a88610a70611796"} Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.202289 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-lcsw7" Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.202548 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-s29xr" Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.221385 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" podStartSLOduration=3.099592737 podStartE2EDuration="47.221364226s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.217883331 +0000 UTC m=+937.156342149" lastFinishedPulling="2025-09-30 20:02:59.33965481 +0000 UTC m=+981.278113638" observedRunningTime="2025-09-30 20:03:00.217992373 +0000 UTC m=+982.156451221" watchObservedRunningTime="2025-09-30 20:03:00.221364226 +0000 UTC m=+982.159823054" Sep 30 20:03:00 crc kubenswrapper[4603]: I0930 20:03:00.283800 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" podStartSLOduration=3.156642797 podStartE2EDuration="47.283780666s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.213743395 +0000 UTC m=+937.152202213" lastFinishedPulling="2025-09-30 20:02:59.340881244 +0000 UTC m=+981.279340082" observedRunningTime="2025-09-30 20:03:00.27953312 +0000 UTC m=+982.217991958" watchObservedRunningTime="2025-09-30 20:03:00.283780666 +0000 UTC m=+982.222239484" Sep 30 20:03:01 crc kubenswrapper[4603]: I0930 20:03:01.208088 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" event={"ID":"ae8ee517-97d6-422e-a058-c229d111e654","Type":"ContainerStarted","Data":"a2784815563f1660accc8c62cbf38f73652826206c7deba048971fa09b898003"} Sep 30 20:03:01 crc kubenswrapper[4603]: I0930 20:03:01.208563 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:03:01 crc kubenswrapper[4603]: I0930 20:03:01.209037 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" Sep 30 20:03:01 crc kubenswrapper[4603]: I0930 20:03:01.229786 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" podStartSLOduration=2.924658848 podStartE2EDuration="48.229760134s" podCreationTimestamp="2025-09-30 20:02:13 +0000 
UTC" firstStartedPulling="2025-09-30 20:02:15.251681496 +0000 UTC m=+937.190140314" lastFinishedPulling="2025-09-30 20:03:00.556782742 +0000 UTC m=+982.495241600" observedRunningTime="2025-09-30 20:03:01.226082452 +0000 UTC m=+983.164541310" watchObservedRunningTime="2025-09-30 20:03:01.229760134 +0000 UTC m=+983.168218982" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.230782 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-qgzmq" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.252039 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-62mwn" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.319653 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-kn7c7" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.330674 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-p6fms" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.357860 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.360188 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-2m98j" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.668255 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.670149 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-bxvz5" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.692616 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-mg4sm" Sep 30 20:03:03 crc kubenswrapper[4603]: I0930 20:03:03.728884 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-fd845" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.083392 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.085084 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-dqmkb" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.289581 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fbmst" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.304228 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.307083 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-z7l94" Sep 30 20:03:04 crc 
kubenswrapper[4603]: I0930 20:03:04.320701 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-h8j45" Sep 30 20:03:04 crc kubenswrapper[4603]: I0930 20:03:04.358706 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-2gvcj" Sep 30 20:03:05 crc kubenswrapper[4603]: I0930 20:03:05.446698 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-gcb85" Sep 30 20:03:05 crc kubenswrapper[4603]: E0930 20:03:05.768087 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podUID="3a7017fa-c8d0-493d-a338-ec3d2626a289" Sep 30 20:03:06 crc kubenswrapper[4603]: E0930 20:03:06.766066 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" podUID="e0158f35-7f0f-4c77-b761-6b624fc675f0" Sep 30 20:03:13 crc kubenswrapper[4603]: I0930 20:03:13.489498 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-n2qf4" Sep 30 20:03:13 crc kubenswrapper[4603]: I0930 20:03:13.869014 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6cvcn" Sep 30 20:03:14 crc kubenswrapper[4603]: I0930 20:03:14.170529 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-28kqj" Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.412931 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" event={"ID":"e0158f35-7f0f-4c77-b761-6b624fc675f0","Type":"ContainerStarted","Data":"5d1b112b796ba84e02244a04372fa903ba1b6abfeb394556e7151e47d911ea5f"} Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.413810 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.417130 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" event={"ID":"3a7017fa-c8d0-493d-a338-ec3d2626a289","Type":"ContainerStarted","Data":"2bc64f5e595147ad53ef49050dbbe9498d09f5914de2cefb8d01eff6f73f7a86"} Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.417445 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.441530 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" 
podStartSLOduration=3.778097801 podStartE2EDuration="1m7.441505856s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.722410969 +0000 UTC m=+937.660869787" lastFinishedPulling="2025-09-30 20:03:19.385819024 +0000 UTC m=+1001.324277842" observedRunningTime="2025-09-30 20:03:20.433471994 +0000 UTC m=+1002.371930822" watchObservedRunningTime="2025-09-30 20:03:20.441505856 +0000 UTC m=+1002.379964684" Sep 30 20:03:20 crc kubenswrapper[4603]: I0930 20:03:20.462061 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" podStartSLOduration=3.7986661379999997 podStartE2EDuration="1m7.462043702s" podCreationTimestamp="2025-09-30 20:02:13 +0000 UTC" firstStartedPulling="2025-09-30 20:02:15.722380038 +0000 UTC m=+937.660838856" lastFinishedPulling="2025-09-30 20:03:19.385757602 +0000 UTC m=+1001.324216420" observedRunningTime="2025-09-30 20:03:20.456320114 +0000 UTC m=+1002.394778972" watchObservedRunningTime="2025-09-30 20:03:20.462043702 +0000 UTC m=+1002.400502520" Sep 30 20:03:24 crc kubenswrapper[4603]: I0930 20:03:24.980283 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-pszrb" Sep 30 20:03:33 crc kubenswrapper[4603]: I0930 20:03:33.840850 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-6l5w5" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.142337 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.144025 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.149564 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.149572 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-r7sbl" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.151616 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.152236 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.170535 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.279379 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.280711 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.282519 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.332088 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.333615 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km2wm\" (UniqueName: \"kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.333816 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.435446 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.435511 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.435548 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.435596 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km2wm\" (UniqueName: \"kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.435677 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85cjg\" (UniqueName: \"kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.436495 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 
20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.471989 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km2wm\" (UniqueName: \"kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm\") pod \"dnsmasq-dns-675f4bcbfc-s8n72\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.537203 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85cjg\" (UniqueName: \"kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.537269 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.537309 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.538407 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.538527 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.553742 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85cjg\" (UniqueName: \"kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg\") pod \"dnsmasq-dns-78dd6ddcc-jzjf6\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.594669 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.768271 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:03:51 crc kubenswrapper[4603]: I0930 20:03:51.985591 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:03:52 crc kubenswrapper[4603]: I0930 20:03:52.004985 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:03:52 crc kubenswrapper[4603]: W0930 20:03:52.015604 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f745a66_992a_4dea_b4e9_9f0d707b7ac0.slice/crio-2df824b0df196f33b8766b4cab17c0b75719e5779e5a11a351c983972a7f1cb1 WatchSource:0}: Error finding container 2df824b0df196f33b8766b4cab17c0b75719e5779e5a11a351c983972a7f1cb1: Status 404 returned error can't find the container with id 2df824b0df196f33b8766b4cab17c0b75719e5779e5a11a351c983972a7f1cb1 Sep 30 20:03:52 crc kubenswrapper[4603]: I0930 20:03:52.695840 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" event={"ID":"4f745a66-992a-4dea-b4e9-9f0d707b7ac0","Type":"ContainerStarted","Data":"2df824b0df196f33b8766b4cab17c0b75719e5779e5a11a351c983972a7f1cb1"} Sep 30 20:03:52 crc kubenswrapper[4603]: I0930 20:03:52.697832 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" event={"ID":"a9bc6d59-5641-445b-aaab-26a07c75794f","Type":"ContainerStarted","Data":"ee6e2713c7759ad87c60f392d24e545d453e52537d95df42c995c8451cb8e070"} Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.112278 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.148609 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.149727 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.154889 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.177368 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.177406 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.177435 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sqw6\" (UniqueName: \"kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.279772 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sqw6\" (UniqueName: \"kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.280051 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.281189 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.281203 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.282037 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.300786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sqw6\" (UniqueName: 
\"kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6\") pod \"dnsmasq-dns-666b6646f7-25crb\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") " pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.440804 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.470253 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.471620 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.480011 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.489869 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"] Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.585850 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlb6d\" (UniqueName: \"kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.585902 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.587015 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.688038 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.688126 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.688179 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlb6d\" (UniqueName: \"kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.689125 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.689563 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.714716 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlb6d\" (UniqueName: \"kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d\") pod \"dnsmasq-dns-57d769cc4f-zx7gv\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") " pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:54 crc kubenswrapper[4603]: I0930 20:03:54.815427 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.097083 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"] Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.296676 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"] Sep 30 20:03:55 crc kubenswrapper[4603]: W0930 20:03:55.321182 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda2da039_d1e4_45c2_a95e_3a6d4cf6714f.slice/crio-44ed5bb82af4509b645dc6064f1da684bf704166ab29ab237c3d62dc1d3ccf54 WatchSource:0}: Error finding container 44ed5bb82af4509b645dc6064f1da684bf704166ab29ab237c3d62dc1d3ccf54: Status 404 returned error can't find the container with id 44ed5bb82af4509b645dc6064f1da684bf704166ab29ab237c3d62dc1d3ccf54 Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.482551 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.484485 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488337 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488426 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488520 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-m724n" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488709 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488796 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.488844 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.497326 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.498541 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519500 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519562 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519584 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519613 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519701 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgq9g\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519838 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519870 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519892 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519909 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519943 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.519998 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.617018 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.618755 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623180 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623212 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623232 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623261 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623287 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623333 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623354 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623381 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623412 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623430 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.623449 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgq9g\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.624116 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.624470 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.624741 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.624888 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hxpdc" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.624957 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.625030 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.625188 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.625337 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.625595 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.626324 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.628010 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.628410 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") device mount path 
\"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.637872 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.642738 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.647932 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.651852 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.655905 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.657261 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.659427 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgq9g\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.683833 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.721596 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-25crb" event={"ID":"a92f2676-b428-4bba-9901-161d27ea3ba8","Type":"ContainerStarted","Data":"0a874a4f84d1d196230b08ead69ae42b738e03052b81a15fd991b330607f4f2d"} Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.724396 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" event={"ID":"da2da039-d1e4-45c2-a95e-3a6d4cf6714f","Type":"ContainerStarted","Data":"44ed5bb82af4509b645dc6064f1da684bf704166ab29ab237c3d62dc1d3ccf54"} Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.814606 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826049 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826099 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826122 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826142 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826162 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t69mr\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826285 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826385 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826414 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826433 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826457 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.826475 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927788 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927852 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927870 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927907 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927924 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.927999 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.928028 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.928070 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.928090 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.928107 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t69mr\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.928153 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.929840 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.930381 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.930683 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.930971 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.936084 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.937813 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.940626 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.941645 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.952252 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.952831 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.955039 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t69mr\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:55 crc kubenswrapper[4603]: I0930 20:03:55.961734 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:56 crc kubenswrapper[4603]: I0930 20:03:56.021769 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:03:56 crc kubenswrapper[4603]: I0930 20:03:56.274327 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:03:56 crc kubenswrapper[4603]: I0930 20:03:56.515213 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:03:56 crc kubenswrapper[4603]: I0930 20:03:56.798349 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerStarted","Data":"1646ae28707b9c6f2bd49fe88002ffaf4c54757bb7e7a03826efc9c71cbe7e13"} Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.037596 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.039586 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.044570 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.045558 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.046295 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-dpczv" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.046595 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.050703 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.051396 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.057007 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.159842 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.159890 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.159910 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.160156 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-secrets\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.160211 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vrv9\" (UniqueName: \"kubernetes.io/projected/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kube-api-access-5vrv9\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.160247 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 
crc kubenswrapper[4603]: I0930 20:03:58.160271 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.160295 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.160317 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.263977 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264049 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264090 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264129 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264225 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264250 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264270 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264328 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-secrets\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.264351 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vrv9\" (UniqueName: \"kubernetes.io/projected/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kube-api-access-5vrv9\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.265077 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.269303 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.269467 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.269510 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/95d8cde0-0585-4e08-a44f-34a9ba7034ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.270192 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95d8cde0-0585-4e08-a44f-34a9ba7034ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.281471 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.283943 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-secrets\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 
20:03:58.290276 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vrv9\" (UniqueName: \"kubernetes.io/projected/95d8cde0-0585-4e08-a44f-34a9ba7034ee-kube-api-access-5vrv9\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.303932 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d8cde0-0585-4e08-a44f-34a9ba7034ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.306310 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"95d8cde0-0585-4e08-a44f-34a9ba7034ee\") " pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.385845 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.389874 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.391462 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.393597 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.394157 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.394521 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7h798" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.394667 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.395599 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.469870 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.469945 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.469993 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.470016 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.470037 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.472667 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9pdv\" (UniqueName: \"kubernetes.io/projected/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kube-api-access-w9pdv\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.472777 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.472804 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.472823 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.574899 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575269 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9pdv\" (UniqueName: \"kubernetes.io/projected/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kube-api-access-w9pdv\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575328 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kolla-config\") pod 
\"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575346 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575364 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575404 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575436 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575456 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575477 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575749 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.575912 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.576136 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 
crc kubenswrapper[4603]: I0930 20:03:58.577908 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.579143 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.582420 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.582686 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.583804 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.599748 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9pdv\" (UniqueName: \"kubernetes.io/projected/75e5a99f-1349-4c73-bb51-2f101b8dc2ab-kube-api-access-w9pdv\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.609905 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"75e5a99f-1349-4c73-bb51-2f101b8dc2ab\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.715532 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.716462 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.720033 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.720191 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-j5wnb" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.720361 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.722423 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.731061 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.782866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-config-data\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.782913 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-combined-ca-bundle\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.782947 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-memcached-tls-certs\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.783004 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kolla-config\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.783031 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frwlg\" (UniqueName: \"kubernetes.io/projected/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kube-api-access-frwlg\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.888922 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kolla-config\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.888989 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frwlg\" (UniqueName: \"kubernetes.io/projected/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kube-api-access-frwlg\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.889030 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-config-data\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.889054 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-combined-ca-bundle\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.889106 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-memcached-tls-certs\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.890222 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-config-data\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.890712 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kolla-config\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.894313 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-memcached-tls-certs\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.896844 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521a0a10-00f0-4bf2-8d0e-36ed170f6949-combined-ca-bundle\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:58 crc kubenswrapper[4603]: I0930 20:03:58.919786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frwlg\" (UniqueName: \"kubernetes.io/projected/521a0a10-00f0-4bf2-8d0e-36ed170f6949-kube-api-access-frwlg\") pod \"memcached-0\" (UID: \"521a0a10-00f0-4bf2-8d0e-36ed170f6949\") " pod="openstack/memcached-0" Sep 30 20:03:59 crc kubenswrapper[4603]: I0930 20:03:59.043075 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.297899 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.298798 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.301801 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-8jvxw" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.310354 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.417628 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8jgn\" (UniqueName: \"kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn\") pod \"kube-state-metrics-0\" (UID: \"f6e219d6-424d-4f85-8506-d6a0a69ae998\") " pod="openstack/kube-state-metrics-0" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.519635 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8jgn\" (UniqueName: \"kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn\") pod \"kube-state-metrics-0\" (UID: \"f6e219d6-424d-4f85-8506-d6a0a69ae998\") " pod="openstack/kube-state-metrics-0" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.562263 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8jgn\" (UniqueName: \"kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn\") pod \"kube-state-metrics-0\" (UID: \"f6e219d6-424d-4f85-8506-d6a0a69ae998\") " pod="openstack/kube-state-metrics-0" Sep 30 20:04:00 crc kubenswrapper[4603]: I0930 20:04:00.616778 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.699117 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9hpwx"] Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.700513 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.702673 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-dbjmc" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.702960 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.703637 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.718441 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9hpwx"] Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769496 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2911bc12-77af-4d68-858f-28d3cc2e263e-scripts\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769550 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-ovn-controller-tls-certs\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769610 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-combined-ca-bundle\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769635 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769679 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhbbx\" (UniqueName: \"kubernetes.io/projected/2911bc12-77af-4d68-858f-28d3cc2e263e-kube-api-access-zhbbx\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769699 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.769728 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-log-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.783778 4603 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-bjlrh"] Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.785238 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.802179 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bjlrh"] Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.859493 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"362ae718-7c2c-48c2-9710-bc3731aa6de8","Type":"ContainerStarted","Data":"775779bd2dfd2e3818a8a38eb6405daedab991dc314086abc2bb96aa66f6e9ca"} Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871304 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-etc-ovs\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871383 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-combined-ca-bundle\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871551 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-run\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871585 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871618 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-lib\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871696 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/099f885d-5177-4906-9641-0a42249a549a-scripts\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871728 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2p52\" (UniqueName: \"kubernetes.io/projected/099f885d-5177-4906-9641-0a42249a549a-kube-api-access-m2p52\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871772 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-zhbbx\" (UniqueName: \"kubernetes.io/projected/2911bc12-77af-4d68-858f-28d3cc2e263e-kube-api-access-zhbbx\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871798 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-log\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871829 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871892 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-log-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871929 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2911bc12-77af-4d68-858f-28d3cc2e263e-scripts\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.871973 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-ovn-controller-tls-certs\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.872081 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.873083 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-run-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.873246 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2911bc12-77af-4d68-858f-28d3cc2e263e-var-log-ovn\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.877237 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2911bc12-77af-4d68-858f-28d3cc2e263e-scripts\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " 
pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.887008 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-ovn-controller-tls-certs\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.904368 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2911bc12-77af-4d68-858f-28d3cc2e263e-combined-ca-bundle\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.908732 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhbbx\" (UniqueName: \"kubernetes.io/projected/2911bc12-77af-4d68-858f-28d3cc2e263e-kube-api-access-zhbbx\") pod \"ovn-controller-9hpwx\" (UID: \"2911bc12-77af-4d68-858f-28d3cc2e263e\") " pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.973751 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-log\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.973981 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-etc-ovs\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.974058 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-run\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.974110 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-lib\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.974192 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-log\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.974223 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/099f885d-5177-4906-9641-0a42249a549a-scripts\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.974294 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2p52\" (UniqueName: 
\"kubernetes.io/projected/099f885d-5177-4906-9641-0a42249a549a-kube-api-access-m2p52\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.975069 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-run\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.975275 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-etc-ovs\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.975402 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/099f885d-5177-4906-9641-0a42249a549a-var-lib\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:03 crc kubenswrapper[4603]: I0930 20:04:03.976297 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/099f885d-5177-4906-9641-0a42249a549a-scripts\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:04 crc kubenswrapper[4603]: I0930 20:04:04.002535 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2p52\" (UniqueName: \"kubernetes.io/projected/099f885d-5177-4906-9641-0a42249a549a-kube-api-access-m2p52\") pod \"ovn-controller-ovs-bjlrh\" (UID: \"099f885d-5177-4906-9641-0a42249a549a\") " pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:04 crc kubenswrapper[4603]: I0930 20:04:04.019638 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:04 crc kubenswrapper[4603]: I0930 20:04:04.099065 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:04 crc kubenswrapper[4603]: I0930 20:04:04.733688 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.150001 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.151622 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.153874 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.154239 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-pcxmv" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.154279 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.158839 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.159000 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.166129 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193677 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193886 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193910 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193929 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-config\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193947 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69f6bf19-226d-409f-afba-67be196077f7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193962 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193977 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ws6bj\" (UniqueName: \"kubernetes.io/projected/69f6bf19-226d-409f-afba-67be196077f7-kube-api-access-ws6bj\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.193992 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.294953 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295278 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295300 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-config\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295317 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69f6bf19-226d-409f-afba-67be196077f7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295333 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295347 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws6bj\" (UniqueName: \"kubernetes.io/projected/69f6bf19-226d-409f-afba-67be196077f7-kube-api-access-ws6bj\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295361 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295427 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:05 crc 
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295809 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69f6bf19-226d-409f-afba-67be196077f7-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.295935 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.296447 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-config\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.297037 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69f6bf19-226d-409f-afba-67be196077f7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.301745 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.302242 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.309838 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/69f6bf19-226d-409f-afba-67be196077f7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.313712 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws6bj\" (UniqueName: \"kubernetes.io/projected/69f6bf19-226d-409f-afba-67be196077f7-kube-api-access-ws6bj\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Sep 30 20:04:05 crc kubenswrapper[4603]: I0930 20:04:05.327330 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"69f6bf19-226d-409f-afba-67be196077f7\") " pod="openstack/ovsdbserver-nb-0"
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.082434 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.083819 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.086135 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.086210 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-wsf9w" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.088047 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.088932 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.095328 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.130994 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131069 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131104 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131269 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131328 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131492 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggl7s\" (UniqueName: \"kubernetes.io/projected/9bb34996-e3b5-4c33-aff4-b85b34009e54-kube-api-access-ggl7s\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc 
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131615 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-config\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.131729 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233123 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggl7s\" (UniqueName: \"kubernetes.io/projected/9bb34996-e3b5-4c33-aff4-b85b34009e54-kube-api-access-ggl7s\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233232 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-config\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233290 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233355 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233408 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233442 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233471 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0"
pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.233924 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.235051 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-config\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.235331 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.236939 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9bb34996-e3b5-4c33-aff4-b85b34009e54-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.240978 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.241472 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.242078 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bb34996-e3b5-4c33-aff4-b85b34009e54-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.254604 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggl7s\" (UniqueName: \"kubernetes.io/projected/9bb34996-e3b5-4c33-aff4-b85b34009e54-kube-api-access-ggl7s\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.274316 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9bb34996-e3b5-4c33-aff4-b85b34009e54\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.409683 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:07 crc kubenswrapper[4603]: W0930 20:04:07.487419 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95d8cde0_0585_4e08_a44f_34a9ba7034ee.slice/crio-1e977e16f0864075f0e51a1675260e174f6a2999749452b3d3a09cae8364aa6a WatchSource:0}: Error finding container 1e977e16f0864075f0e51a1675260e174f6a2999749452b3d3a09cae8364aa6a: Status 404 returned error can't find the container with id 1e977e16f0864075f0e51a1675260e174f6a2999749452b3d3a09cae8364aa6a Sep 30 20:04:07 crc kubenswrapper[4603]: I0930 20:04:07.898354 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95d8cde0-0585-4e08-a44f-34a9ba7034ee","Type":"ContainerStarted","Data":"1e977e16f0864075f0e51a1675260e174f6a2999749452b3d3a09cae8364aa6a"} Sep 30 20:04:08 crc kubenswrapper[4603]: I0930 20:04:08.441909 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:04:08 crc kubenswrapper[4603]: I0930 20:04:08.441965 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:04:15 crc kubenswrapper[4603]: I0930 20:04:15.259204 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.492715 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.492849 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.492849 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6sqw6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-25crb_openstack(a92f2676-b428-4bba-9901-161d27ea3ba8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.494213 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-25crb" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.716288 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.716984 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-85cjg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-jzjf6_openstack(4f745a66-992a-4dea-b4e9-9f0d707b7ac0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.718926 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" podUID="4f745a66-992a-4dea-b4e9-9f0d707b7ac0"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.733180 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.733356 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-km2wm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-s8n72_openstack(a9bc6d59-5641-445b-aaab-26a07c75794f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.735162 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" podUID="a9bc6d59-5641-445b-aaab-26a07c75794f"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.840830 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.841207 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tlb6d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-zx7gv_openstack(da2da039-d1e4-45c2-a95e-3a6d4cf6714f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.842373 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f"
Sep 30 20:04:15 crc kubenswrapper[4603]: I0930 20:04:15.966225 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 30 20:04:15 crc kubenswrapper[4603]: I0930 20:04:15.973418 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9bb34996-e3b5-4c33-aff4-b85b34009e54","Type":"ContainerStarted","Data":"e9de0a859f70214339f534bc5f9dd2251f308f4b636f54f8eefa965d6d000c05"}
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.974671 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-25crb" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8"
Sep 30 20:04:15 crc kubenswrapper[4603]: E0930 20:04:15.974687 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f"
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.184003 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9hpwx"] Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.198583 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:04:16 crc kubenswrapper[4603]: W0930 20:04:16.205942 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2911bc12_77af_4d68_858f_28d3cc2e263e.slice/crio-aacef0207d00a1e8a99fe5098ae326c5fb6696137536d1264a6ca8358c69634d WatchSource:0}: Error finding container aacef0207d00a1e8a99fe5098ae326c5fb6696137536d1264a6ca8358c69634d: Status 404 returned error can't find the container with id aacef0207d00a1e8a99fe5098ae326c5fb6696137536d1264a6ca8358c69634d Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.429712 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bjlrh"] Sep 30 20:04:16 crc kubenswrapper[4603]: W0930 20:04:16.547026 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod099f885d_5177_4906_9641_0a42249a549a.slice/crio-a1482f75c9401a89b01020e27a5af9ad231b3c6c3bb0cb99c1b8f38b39d9b349 WatchSource:0}: Error finding container a1482f75c9401a89b01020e27a5af9ad231b3c6c3bb0cb99c1b8f38b39d9b349: Status 404 returned error can't find the container with id a1482f75c9401a89b01020e27a5af9ad231b3c6c3bb0cb99c1b8f38b39d9b349 Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.627352 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.633966 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.698056 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85cjg\" (UniqueName: \"kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg\") pod \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.698118 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km2wm\" (UniqueName: \"kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm\") pod \"a9bc6d59-5641-445b-aaab-26a07c75794f\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.698230 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config\") pod \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.698277 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config\") pod \"a9bc6d59-5641-445b-aaab-26a07c75794f\" (UID: \"a9bc6d59-5641-445b-aaab-26a07c75794f\") " Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.698317 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc\") pod \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\" (UID: \"4f745a66-992a-4dea-b4e9-9f0d707b7ac0\") " Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.699046 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4f745a66-992a-4dea-b4e9-9f0d707b7ac0" (UID: "4f745a66-992a-4dea-b4e9-9f0d707b7ac0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.699956 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config" (OuterVolumeSpecName: "config") pod "4f745a66-992a-4dea-b4e9-9f0d707b7ac0" (UID: "4f745a66-992a-4dea-b4e9-9f0d707b7ac0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.700241 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config" (OuterVolumeSpecName: "config") pod "a9bc6d59-5641-445b-aaab-26a07c75794f" (UID: "a9bc6d59-5641-445b-aaab-26a07c75794f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.733291 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg" (OuterVolumeSpecName: "kube-api-access-85cjg") pod "4f745a66-992a-4dea-b4e9-9f0d707b7ac0" (UID: "4f745a66-992a-4dea-b4e9-9f0d707b7ac0"). InnerVolumeSpecName "kube-api-access-85cjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.733633 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm" (OuterVolumeSpecName: "kube-api-access-km2wm") pod "a9bc6d59-5641-445b-aaab-26a07c75794f" (UID: "a9bc6d59-5641-445b-aaab-26a07c75794f"). InnerVolumeSpecName "kube-api-access-km2wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.800209 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.800239 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9bc6d59-5641-445b-aaab-26a07c75794f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.800253 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.800265 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85cjg\" (UniqueName: \"kubernetes.io/projected/4f745a66-992a-4dea-b4e9-9f0d707b7ac0-kube-api-access-85cjg\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.800276 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km2wm\" (UniqueName: \"kubernetes.io/projected/a9bc6d59-5641-445b-aaab-26a07c75794f-kube-api-access-km2wm\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.982293 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerStarted","Data":"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.984661 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9hpwx" event={"ID":"2911bc12-77af-4d68-858f-28d3cc2e263e","Type":"ContainerStarted","Data":"aacef0207d00a1e8a99fe5098ae326c5fb6696137536d1264a6ca8358c69634d"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.986031 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" event={"ID":"4f745a66-992a-4dea-b4e9-9f0d707b7ac0","Type":"ContainerDied","Data":"2df824b0df196f33b8766b4cab17c0b75719e5779e5a11a351c983972a7f1cb1"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.986040 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jzjf6" Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.989271 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"362ae718-7c2c-48c2-9710-bc3731aa6de8","Type":"ContainerStarted","Data":"60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.992643 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6e219d6-424d-4f85-8506-d6a0a69ae998","Type":"ContainerStarted","Data":"b741463f933e040a7114e61a87d45f1040c6dcccac2b5387e940f6980ef103e1"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.994413 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" event={"ID":"a9bc6d59-5641-445b-aaab-26a07c75794f","Type":"ContainerDied","Data":"ee6e2713c7759ad87c60f392d24e545d453e52537d95df42c995c8451cb8e070"} Sep 30 20:04:16 crc kubenswrapper[4603]: I0930 20:04:16.994445 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s8n72" Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.006034 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bjlrh" event={"ID":"099f885d-5177-4906-9641-0a42249a549a","Type":"ContainerStarted","Data":"a1482f75c9401a89b01020e27a5af9ad231b3c6c3bb0cb99c1b8f38b39d9b349"} Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.013005 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"75e5a99f-1349-4c73-bb51-2f101b8dc2ab","Type":"ContainerStarted","Data":"23762aec59739ef8a8445ed0630a6696292c6f6463bf7bededea6cdc30374175"} Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.016837 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"521a0a10-00f0-4bf2-8d0e-36ed170f6949","Type":"ContainerStarted","Data":"e7e71a6982d1284fabcedde73a0dee03f1de1b4ff0fd06163ab253414ba3c438"} Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.043496 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.048028 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jzjf6"] Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.115610 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.125038 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s8n72"] Sep 30 20:04:17 crc kubenswrapper[4603]: I0930 20:04:17.237255 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:04:17 crc kubenswrapper[4603]: W0930 20:04:17.340531 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69f6bf19_226d_409f_afba_67be196077f7.slice/crio-069275f806f3a734c44e6d7fddf5322f8adb7b212529c0d3a8bed41c29bd585f WatchSource:0}: Error finding container 069275f806f3a734c44e6d7fddf5322f8adb7b212529c0d3a8bed41c29bd585f: Status 404 returned error can't find the container with id 069275f806f3a734c44e6d7fddf5322f8adb7b212529c0d3a8bed41c29bd585f Sep 30 20:04:18 crc kubenswrapper[4603]: I0930 20:04:18.024730 4603 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"69f6bf19-226d-409f-afba-67be196077f7","Type":"ContainerStarted","Data":"069275f806f3a734c44e6d7fddf5322f8adb7b212529c0d3a8bed41c29bd585f"} Sep 30 20:04:18 crc kubenswrapper[4603]: I0930 20:04:18.772443 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f745a66-992a-4dea-b4e9-9f0d707b7ac0" path="/var/lib/kubelet/pods/4f745a66-992a-4dea-b4e9-9f0d707b7ac0/volumes" Sep 30 20:04:18 crc kubenswrapper[4603]: I0930 20:04:18.773486 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9bc6d59-5641-445b-aaab-26a07c75794f" path="/var/lib/kubelet/pods/a9bc6d59-5641-445b-aaab-26a07c75794f/volumes" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.071064 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9bb34996-e3b5-4c33-aff4-b85b34009e54","Type":"ContainerStarted","Data":"8f0a04367a27de4f637a418aa241a5b12c0295ea1a3c1aac1d0e0b83459ee9f7"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.072849 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"69f6bf19-226d-409f-afba-67be196077f7","Type":"ContainerStarted","Data":"d08288deedb1ba81d68e6f509ac67985dcc74ec17ec16902e97e4010e60cb783"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.074482 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95d8cde0-0585-4e08-a44f-34a9ba7034ee","Type":"ContainerStarted","Data":"2723d4b58170655fa4d918ee4de748bcf11dda3a49dc57c3277bc52e0f0f387c"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.076956 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"521a0a10-00f0-4bf2-8d0e-36ed170f6949","Type":"ContainerStarted","Data":"4c769d9dbe301559f78b90a36a51bba8068eedce805bf697e6ffc3e7014ac537"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.077026 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.078599 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6e219d6-424d-4f85-8506-d6a0a69ae998","Type":"ContainerStarted","Data":"ae9625328bae508cee82254b087a19e5ea9b803c804dac2b667ce2cc8ca584bd"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.078713 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.080248 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bjlrh" event={"ID":"099f885d-5177-4906-9641-0a42249a549a","Type":"ContainerStarted","Data":"b4d4fc9b196259697e073eac514c7b599c75b1bcd63e74bd9ff9ef58beb8c00a"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.083510 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"75e5a99f-1349-4c73-bb51-2f101b8dc2ab","Type":"ContainerStarted","Data":"a4add33cfbae2169272df5f587c731e4f10627f1535db896a94eca999db95eb9"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.085481 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9hpwx" event={"ID":"2911bc12-77af-4d68-858f-28d3cc2e263e","Type":"ContainerStarted","Data":"b0a4b1552b1923bc688397ac3d93f73ebd17c42ed1a58f003c2e627520ca2224"} Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.085610 4603 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-9hpwx" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.121502 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=19.436586407 podStartE2EDuration="26.121480053s" podCreationTimestamp="2025-09-30 20:03:58 +0000 UTC" firstStartedPulling="2025-09-30 20:04:16.09708695 +0000 UTC m=+1058.035545768" lastFinishedPulling="2025-09-30 20:04:22.781980576 +0000 UTC m=+1064.720439414" observedRunningTime="2025-09-30 20:04:24.121445382 +0000 UTC m=+1066.059904200" watchObservedRunningTime="2025-09-30 20:04:24.121480053 +0000 UTC m=+1066.059938871" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.146358 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=16.695993355 podStartE2EDuration="24.146335438s" podCreationTimestamp="2025-09-30 20:04:00 +0000 UTC" firstStartedPulling="2025-09-30 20:04:15.975128874 +0000 UTC m=+1057.913587692" lastFinishedPulling="2025-09-30 20:04:23.425470947 +0000 UTC m=+1065.363929775" observedRunningTime="2025-09-30 20:04:24.142937838 +0000 UTC m=+1066.081396666" watchObservedRunningTime="2025-09-30 20:04:24.146335438 +0000 UTC m=+1066.084794266" Sep 30 20:04:24 crc kubenswrapper[4603]: I0930 20:04:24.205760 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-9hpwx" podStartSLOduration=14.089670718 podStartE2EDuration="21.205744159s" podCreationTimestamp="2025-09-30 20:04:03 +0000 UTC" firstStartedPulling="2025-09-30 20:04:16.211358761 +0000 UTC m=+1058.149817579" lastFinishedPulling="2025-09-30 20:04:23.327432202 +0000 UTC m=+1065.265891020" observedRunningTime="2025-09-30 20:04:24.201597378 +0000 UTC m=+1066.140056196" watchObservedRunningTime="2025-09-30 20:04:24.205744159 +0000 UTC m=+1066.144202977" Sep 30 20:04:25 crc kubenswrapper[4603]: I0930 20:04:25.094610 4603 generic.go:334] "Generic (PLEG): container finished" podID="099f885d-5177-4906-9641-0a42249a549a" containerID="b4d4fc9b196259697e073eac514c7b599c75b1bcd63e74bd9ff9ef58beb8c00a" exitCode=0 Sep 30 20:04:25 crc kubenswrapper[4603]: I0930 20:04:25.095945 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bjlrh" event={"ID":"099f885d-5177-4906-9641-0a42249a549a","Type":"ContainerDied","Data":"b4d4fc9b196259697e073eac514c7b599c75b1bcd63e74bd9ff9ef58beb8c00a"} Sep 30 20:04:26 crc kubenswrapper[4603]: I0930 20:04:26.110639 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bjlrh" event={"ID":"099f885d-5177-4906-9641-0a42249a549a","Type":"ContainerStarted","Data":"45eae6e34cb9566f53131e060bfa5f79cbf20586950939cfa69e3ab5fea3927b"} Sep 30 20:04:26 crc kubenswrapper[4603]: I0930 20:04:26.111229 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:26 crc kubenswrapper[4603]: I0930 20:04:26.111250 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bjlrh" event={"ID":"099f885d-5177-4906-9641-0a42249a549a","Type":"ContainerStarted","Data":"d53a22140599d962274484761b150079a24de63696ba30f886752a8696eb28b1"} Sep 30 20:04:26 crc kubenswrapper[4603]: I0930 20:04:26.138703 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-bjlrh" podStartSLOduration=16.908052367 
podStartE2EDuration="23.138594766s" podCreationTimestamp="2025-09-30 20:04:03 +0000 UTC" firstStartedPulling="2025-09-30 20:04:16.551445727 +0000 UTC m=+1058.489904545" lastFinishedPulling="2025-09-30 20:04:22.781988116 +0000 UTC m=+1064.720446944" observedRunningTime="2025-09-30 20:04:26.131835175 +0000 UTC m=+1068.070294023" watchObservedRunningTime="2025-09-30 20:04:26.138594766 +0000 UTC m=+1068.077074634" Sep 30 20:04:27 crc kubenswrapper[4603]: I0930 20:04:27.118741 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:28 crc kubenswrapper[4603]: I0930 20:04:28.129724 4603 generic.go:334] "Generic (PLEG): container finished" podID="75e5a99f-1349-4c73-bb51-2f101b8dc2ab" containerID="a4add33cfbae2169272df5f587c731e4f10627f1535db896a94eca999db95eb9" exitCode=0 Sep 30 20:04:28 crc kubenswrapper[4603]: I0930 20:04:28.129828 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"75e5a99f-1349-4c73-bb51-2f101b8dc2ab","Type":"ContainerDied","Data":"a4add33cfbae2169272df5f587c731e4f10627f1535db896a94eca999db95eb9"} Sep 30 20:04:28 crc kubenswrapper[4603]: I0930 20:04:28.133336 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95d8cde0-0585-4e08-a44f-34a9ba7034ee","Type":"ContainerDied","Data":"2723d4b58170655fa4d918ee4de748bcf11dda3a49dc57c3277bc52e0f0f387c"} Sep 30 20:04:28 crc kubenswrapper[4603]: I0930 20:04:28.133287 4603 generic.go:334] "Generic (PLEG): container finished" podID="95d8cde0-0585-4e08-a44f-34a9ba7034ee" containerID="2723d4b58170655fa4d918ee4de748bcf11dda3a49dc57c3277bc52e0f0f387c" exitCode=0 Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.051352 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.148043 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"69f6bf19-226d-409f-afba-67be196077f7","Type":"ContainerStarted","Data":"aff692c92d09e907aaf08ae28209582221fd9705cca77cfdd401c23930b9afa7"} Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.151222 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"95d8cde0-0585-4e08-a44f-34a9ba7034ee","Type":"ContainerStarted","Data":"0537081db896b4b979c451d680a271782bb59936e3e144062a30cba98727c796"} Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.165757 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"75e5a99f-1349-4c73-bb51-2f101b8dc2ab","Type":"ContainerStarted","Data":"9fc1cd702b9b49a592950d0a34beb4f3da99f0bc85a990ac40422f61af90a0f1"} Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.172676 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"9bb34996-e3b5-4c33-aff4-b85b34009e54","Type":"ContainerStarted","Data":"dfeeaa1090e86e9d7073ac0d2e2f8795f46a91e8e421cbab1e8cf027cee2a55f"} Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.193728 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=14.504357773 podStartE2EDuration="25.193703825s" podCreationTimestamp="2025-09-30 20:04:04 +0000 UTC" firstStartedPulling="2025-09-30 20:04:17.343329572 +0000 UTC m=+1059.281788390" lastFinishedPulling="2025-09-30 20:04:28.032675604 +0000 UTC m=+1069.971134442" 
observedRunningTime="2025-09-30 20:04:29.182820113 +0000 UTC m=+1071.121278951" watchObservedRunningTime="2025-09-30 20:04:29.193703825 +0000 UTC m=+1071.132162643" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.218228 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.564018478 podStartE2EDuration="32.218207431s" podCreationTimestamp="2025-09-30 20:03:57 +0000 UTC" firstStartedPulling="2025-09-30 20:04:16.20948313 +0000 UTC m=+1058.147941948" lastFinishedPulling="2025-09-30 20:04:22.863672073 +0000 UTC m=+1064.802130901" observedRunningTime="2025-09-30 20:04:29.214367348 +0000 UTC m=+1071.152826166" watchObservedRunningTime="2025-09-30 20:04:29.218207431 +0000 UTC m=+1071.156666259" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.258075 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.297411888 podStartE2EDuration="32.258050898s" podCreationTimestamp="2025-09-30 20:03:57 +0000 UTC" firstStartedPulling="2025-09-30 20:04:07.502453447 +0000 UTC m=+1049.440912275" lastFinishedPulling="2025-09-30 20:04:22.463092477 +0000 UTC m=+1064.401551285" observedRunningTime="2025-09-30 20:04:29.245560543 +0000 UTC m=+1071.184019361" watchObservedRunningTime="2025-09-30 20:04:29.258050898 +0000 UTC m=+1071.196509716" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.275863 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.787173589 podStartE2EDuration="23.275843304s" podCreationTimestamp="2025-09-30 20:04:06 +0000 UTC" firstStartedPulling="2025-09-30 20:04:15.564572621 +0000 UTC m=+1057.503031439" lastFinishedPulling="2025-09-30 20:04:28.053242316 +0000 UTC m=+1069.991701154" observedRunningTime="2025-09-30 20:04:29.272539055 +0000 UTC m=+1071.210997873" watchObservedRunningTime="2025-09-30 20:04:29.275843304 +0000 UTC m=+1071.214302122" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.521474 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:29 crc kubenswrapper[4603]: I0930 20:04:29.569008 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.179866 4603 generic.go:334] "Generic (PLEG): container finished" podID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerID="020dc29eebb1670137815193fbb89aea4ca23d0de9f3743c8aa0b3119bd7a911" exitCode=0 Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.179932 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-25crb" event={"ID":"a92f2676-b428-4bba-9901-161d27ea3ba8","Type":"ContainerDied","Data":"020dc29eebb1670137815193fbb89aea4ca23d0de9f3743c8aa0b3119bd7a911"} Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.182709 4603 generic.go:334] "Generic (PLEG): container finished" podID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerID="7d4b129e86aec77b7ca74f3176457f9473592aa2f98d6b321614986cdc33a2fc" exitCode=0 Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.182754 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" event={"ID":"da2da039-d1e4-45c2-a95e-3a6d4cf6714f","Type":"ContainerDied","Data":"7d4b129e86aec77b7ca74f3176457f9473592aa2f98d6b321614986cdc33a2fc"} Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 
20:04:30.183275 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.246805 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.569430 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.605884 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.607071 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.610353 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.624903 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.697307 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.706245 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.733867 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-rjs74"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.734886 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.742392 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.762238 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.762282 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p76t5\" (UniqueName: \"kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.762355 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.762387 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " 
pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.791782 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.793058 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.801616 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rjs74"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.809803 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"] Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.871006 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.871279 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p76t5\" (UniqueName: \"kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.871307 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-combined-ca-bundle\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.872303 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.876115 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovn-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.876190 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.876224 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-config\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.876256 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovs-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.876319 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.886925 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.887670 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.887848 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.888024 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsp48\" (UniqueName: \"kubernetes.io/projected/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-kube-api-access-tsp48\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.907418 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p76t5\" (UniqueName: \"kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5\") pod \"dnsmasq-dns-5bf47b49b7-dc5sd\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") " pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.939499 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.989937 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.989998 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovn-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990023 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-config\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990039 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovs-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990068 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhwd2\" (UniqueName: \"kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990087 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990108 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990137 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsp48\" (UniqueName: \"kubernetes.io/projected/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-kube-api-access-tsp48\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990175 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: 
\"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.990217 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-combined-ca-bundle\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.995481 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovn-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.996114 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-config\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:30 crc kubenswrapper[4603]: I0930 20:04:30.996210 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-ovs-rundir\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:30.998914 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:30.999713 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-combined-ca-bundle\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.021150 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsp48\" (UniqueName: \"kubernetes.io/projected/d06f3a9f-3191-4f74-8ccd-e765ca5d6613-kube-api-access-tsp48\") pod \"ovn-controller-metrics-rjs74\" (UID: \"d06f3a9f-3191-4f74-8ccd-e765ca5d6613\") " pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.050670 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rjs74" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.092966 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.093042 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhwd2\" (UniqueName: \"kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.093063 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.093104 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.093900 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.094426 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.095128 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.145490 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhwd2\" (UniqueName: \"kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2\") pod \"dnsmasq-dns-57d65f699f-njdfm\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") " pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.213146 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-25crb" event={"ID":"a92f2676-b428-4bba-9901-161d27ea3ba8","Type":"ContainerStarted","Data":"f160cde374b7adb2358d337439af133d012ad8e6ca34a1338c93f9a6818d061e"} Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.213312 4603 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openstack/dnsmasq-dns-666b6646f7-25crb" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="dnsmasq-dns" containerID="cri-o://f160cde374b7adb2358d337439af133d012ad8e6ca34a1338c93f9a6818d061e" gracePeriod=10 Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.213534 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-25crb" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.226414 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="dnsmasq-dns" containerID="cri-o://0511f2697e50b2bbdcc5f7d575d0b7fb76b35c0d0721197b7df627cd462cdd2f" gracePeriod=10 Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.226639 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" event={"ID":"da2da039-d1e4-45c2-a95e-3a6d4cf6714f","Type":"ContainerStarted","Data":"0511f2697e50b2bbdcc5f7d575d0b7fb76b35c0d0721197b7df627cd462cdd2f"} Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.226684 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.256823 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-25crb" podStartSLOduration=3.100515237 podStartE2EDuration="37.256805919s" podCreationTimestamp="2025-09-30 20:03:54 +0000 UTC" firstStartedPulling="2025-09-30 20:03:55.122318766 +0000 UTC m=+1037.060777574" lastFinishedPulling="2025-09-30 20:04:29.278609438 +0000 UTC m=+1071.217068256" observedRunningTime="2025-09-30 20:04:31.25458235 +0000 UTC m=+1073.193041168" watchObservedRunningTime="2025-09-30 20:04:31.256805919 +0000 UTC m=+1073.195264737" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.274510 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.276824 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-njdfm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.297027 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" podStartSLOduration=-9223371999.557766 podStartE2EDuration="37.297010536s" podCreationTimestamp="2025-09-30 20:03:54 +0000 UTC" firstStartedPulling="2025-09-30 20:03:55.333254814 +0000 UTC m=+1037.271713632" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:04:31.295430594 +0000 UTC m=+1073.233889412" watchObservedRunningTime="2025-09-30 20:04:31.297010536 +0000 UTC m=+1073.235469354" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.332229 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.333897 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.348510 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.377211 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.418488 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.470787 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.509836 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.509900 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74nts\" (UniqueName: \"kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.509953 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.509978 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.510003 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.611116 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.611199 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74nts\" (UniqueName: \"kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 
crc kubenswrapper[4603]: I0930 20:04:31.611272 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.611301 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.612245 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.612328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.612888 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.613069 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.613483 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.633746 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74nts\" (UniqueName: \"kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts\") pod \"dnsmasq-dns-b8fbc5445-6rp55\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") " pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: E0930 20:04:31.683905 4603 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.129:56882->38.102.83.129:39427: write tcp 38.102.83.129:56882->38.102.83.129:39427: write: broken pipe Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.695692 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.775812 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rjs74"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.783643 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.828369 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.835175 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.840501 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.842448 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.842588 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.842694 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-fq6wm" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.849873 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.915221 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"] Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.923356 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-cache\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.923406 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.923479 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.923512 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjtkr\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-kube-api-access-rjtkr\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:31 crc kubenswrapper[4603]: I0930 20:04:31.923545 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-lock\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 
20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.024796 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.024853 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjtkr\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-kube-api-access-rjtkr\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.024942 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-lock\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.025021 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-cache\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.025052 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.025281 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.025306 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.025363 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:04:32.52533988 +0000 UTC m=+1074.463798708 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.027618 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.027639 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-cache\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.031335 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c71b4eca-ba52-40ee-88e6-f0b50794825d-lock\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.061862 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.065923 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjtkr\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-kube-api-access-rjtkr\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.233944 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-njdfm" event={"ID":"f7df3627-025d-4dd7-b461-0aa2e77a334a","Type":"ContainerStarted","Data":"1f1243f1adca0ca5231616ba2b0a0f5898376c8193ce296249f9e1eba909487c"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.236305 4603 generic.go:334] "Generic (PLEG): container finished" podID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerID="0511f2697e50b2bbdcc5f7d575d0b7fb76b35c0d0721197b7df627cd462cdd2f" exitCode=0 Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.236405 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" event={"ID":"da2da039-d1e4-45c2-a95e-3a6d4cf6714f","Type":"ContainerDied","Data":"0511f2697e50b2bbdcc5f7d575d0b7fb76b35c0d0721197b7df627cd462cdd2f"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.240403 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rjs74" event={"ID":"d06f3a9f-3191-4f74-8ccd-e765ca5d6613","Type":"ContainerStarted","Data":"f354f6d9a842c55778887f7644ae89f9b3a02ec026a1498f36d604ed554e149f"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.241670 4603 generic.go:334] "Generic (PLEG): container finished" podID="2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" containerID="e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7" exitCode=0 Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.241707 4603 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" event={"ID":"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5","Type":"ContainerDied","Data":"e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.241720 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" event={"ID":"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5","Type":"ContainerStarted","Data":"11e6531f1293fa215c58ff36e4ca4f3af18c1f351a8c89d8ecf25127f6f74c85"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.251412 4603 generic.go:334] "Generic (PLEG): container finished" podID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerID="f160cde374b7adb2358d337439af133d012ad8e6ca34a1338c93f9a6818d061e" exitCode=0 Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.251493 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-25crb" event={"ID":"a92f2676-b428-4bba-9901-161d27ea3ba8","Type":"ContainerDied","Data":"f160cde374b7adb2358d337439af133d012ad8e6ca34a1338c93f9a6818d061e"} Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.252251 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.257253 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"] Sep 30 20:04:32 crc kubenswrapper[4603]: W0930 20:04:32.308351 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7487171_f64a_433c_b167_e757a12c60d6.slice/crio-8fb979d1f9cd18b3407865eb95dfda3a80657c00e9bf1e9db7c6087cb9382fbd WatchSource:0}: Error finding container 8fb979d1f9cd18b3407865eb95dfda3a80657c00e9bf1e9db7c6087cb9382fbd: Status 404 returned error can't find the container with id 8fb979d1f9cd18b3407865eb95dfda3a80657c00e9bf1e9db7c6087cb9382fbd Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.340913 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.454565 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.558509 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.561929 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.566233 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.566332 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:04:33.566295705 +0000 UTC m=+1075.504754523 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.583441 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-25crb"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614252 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.614654 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="init"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614666 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="init"
Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.614696 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614703 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.614714 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614720 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: E0930 20:04:32.614735 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="init"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614742 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="init"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614893 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.614911 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" containerName="dnsmasq-dns"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.615761 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.619207 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.619436 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.619631 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-252sz"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.619733 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.650233 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669218 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config\") pod \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669272 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sqw6\" (UniqueName: \"kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6\") pod \"a92f2676-b428-4bba-9901-161d27ea3ba8\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669331 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config\") pod \"a92f2676-b428-4bba-9901-161d27ea3ba8\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669469 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc\") pod \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669517 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc\") pod \"a92f2676-b428-4bba-9901-161d27ea3ba8\" (UID: \"a92f2676-b428-4bba-9901-161d27ea3ba8\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669542 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlb6d\" (UniqueName: \"kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d\") pod \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\" (UID: \"da2da039-d1e4-45c2-a95e-3a6d4cf6714f\") "
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669754 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-scripts\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669777 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-config\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669812 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669830 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669859 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669931 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.669956 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w649v\" (UniqueName: \"kubernetes.io/projected/f5abf540-1fba-4b2e-83c6-4be4e500f153-kube-api-access-w649v\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.677136 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6" (OuterVolumeSpecName: "kube-api-access-6sqw6") pod "a92f2676-b428-4bba-9901-161d27ea3ba8" (UID: "a92f2676-b428-4bba-9901-161d27ea3ba8"). InnerVolumeSpecName "kube-api-access-6sqw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.680525 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d" (OuterVolumeSpecName: "kube-api-access-tlb6d") pod "da2da039-d1e4-45c2-a95e-3a6d4cf6714f" (UID: "da2da039-d1e4-45c2-a95e-3a6d4cf6714f"). InnerVolumeSpecName "kube-api-access-tlb6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.730417 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "da2da039-d1e4-45c2-a95e-3a6d4cf6714f" (UID: "da2da039-d1e4-45c2-a95e-3a6d4cf6714f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.739292 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config" (OuterVolumeSpecName: "config") pod "a92f2676-b428-4bba-9901-161d27ea3ba8" (UID: "a92f2676-b428-4bba-9901-161d27ea3ba8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.747869 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a92f2676-b428-4bba-9901-161d27ea3ba8" (UID: "a92f2676-b428-4bba-9901-161d27ea3ba8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.752346 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config" (OuterVolumeSpecName: "config") pod "da2da039-d1e4-45c2-a95e-3a6d4cf6714f" (UID: "da2da039-d1e4-45c2-a95e-3a6d4cf6714f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772558 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772599 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w649v\" (UniqueName: \"kubernetes.io/projected/f5abf540-1fba-4b2e-83c6-4be4e500f153-kube-api-access-w649v\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772639 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-scripts\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772658 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-config\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772688 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772711 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772736 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772774 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772783 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772793 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlb6d\" (UniqueName: \"kubernetes.io/projected/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-kube-api-access-tlb6d\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772804 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da2da039-d1e4-45c2-a95e-3a6d4cf6714f-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772823 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sqw6\" (UniqueName: \"kubernetes.io/projected/a92f2676-b428-4bba-9901-161d27ea3ba8-kube-api-access-6sqw6\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.772833 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a92f2676-b428-4bba-9901-161d27ea3ba8-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.773207 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.774537 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-scripts\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.775041 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5abf540-1fba-4b2e-83c6-4be4e500f153-config\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.776023 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.778238 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.787238 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5abf540-1fba-4b2e-83c6-4be4e500f153-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.794410 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w649v\" (UniqueName: \"kubernetes.io/projected/f5abf540-1fba-4b2e-83c6-4be4e500f153-kube-api-access-w649v\") pod \"ovn-northd-0\" (UID: \"f5abf540-1fba-4b2e-83c6-4be4e500f153\") " pod="openstack/ovn-northd-0"
Sep 30 20:04:32 crc kubenswrapper[4603]: I0930 20:04:32.953042 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.260525 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" event={"ID":"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5","Type":"ContainerStarted","Data":"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.262283 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.264524 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-25crb" event={"ID":"a92f2676-b428-4bba-9901-161d27ea3ba8","Type":"ContainerDied","Data":"0a874a4f84d1d196230b08ead69ae42b738e03052b81a15fd991b330607f4f2d"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.264587 4603 scope.go:117] "RemoveContainer" containerID="f160cde374b7adb2358d337439af133d012ad8e6ca34a1338c93f9a6818d061e"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.264644 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-25crb"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.281391 4603 generic.go:334] "Generic (PLEG): container finished" podID="b7487171-f64a-433c-b167-e757a12c60d6" containerID="a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819" exitCode=0
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.281468 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" event={"ID":"b7487171-f64a-433c-b167-e757a12c60d6","Type":"ContainerDied","Data":"a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.281492 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" event={"ID":"b7487171-f64a-433c-b167-e757a12c60d6","Type":"ContainerStarted","Data":"8fb979d1f9cd18b3407865eb95dfda3a80657c00e9bf1e9db7c6087cb9382fbd"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.294941 4603 generic.go:334] "Generic (PLEG): container finished" podID="f7df3627-025d-4dd7-b461-0aa2e77a334a" containerID="0ed8e7b5c58d1a0993b37e47b1492d136a853afda214c1fa6ffd39d1f823e185" exitCode=0
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.295010 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-njdfm" event={"ID":"f7df3627-025d-4dd7-b461-0aa2e77a334a","Type":"ContainerDied","Data":"0ed8e7b5c58d1a0993b37e47b1492d136a853afda214c1fa6ffd39d1f823e185"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.306174 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv" event={"ID":"da2da039-d1e4-45c2-a95e-3a6d4cf6714f","Type":"ContainerDied","Data":"44ed5bb82af4509b645dc6064f1da684bf704166ab29ab237c3d62dc1d3ccf54"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.306292 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zx7gv"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.314711 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rjs74" event={"ID":"d06f3a9f-3191-4f74-8ccd-e765ca5d6613","Type":"ContainerStarted","Data":"365997ef404c72202112e1afd7f384b8916b135fab0b082db5a40b6d3e907bf7"}
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.318205 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" podStartSLOduration=3.318186868 podStartE2EDuration="3.318186868s" podCreationTimestamp="2025-09-30 20:04:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:04:33.290547728 +0000 UTC m=+1075.229006546" watchObservedRunningTime="2025-09-30 20:04:33.318186868 +0000 UTC m=+1075.256645686"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.323704 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"]
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.334640 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-25crb"]
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.388415 4603 scope.go:117] "RemoveContainer" containerID="020dc29eebb1670137815193fbb89aea4ca23d0de9f3743c8aa0b3119bd7a911"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.400423 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-rjs74" podStartSLOduration=3.400403741 podStartE2EDuration="3.400403741s" podCreationTimestamp="2025-09-30 20:04:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:04:33.362729311 +0000 UTC m=+1075.301188129" watchObservedRunningTime="2025-09-30 20:04:33.400403741 +0000 UTC m=+1075.338862559"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.475660 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"]
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.505358 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zx7gv"]
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.543437 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.550440 4603 scope.go:117] "RemoveContainer" containerID="0511f2697e50b2bbdcc5f7d575d0b7fb76b35c0d0721197b7df627cd462cdd2f"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.597095 4603 scope.go:117] "RemoveContainer" containerID="7d4b129e86aec77b7ca74f3176457f9473592aa2f98d6b321614986cdc33a2fc"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.599486 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0"
Sep 30 20:04:33 crc kubenswrapper[4603]: E0930 20:04:33.599715 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 20:04:33 crc kubenswrapper[4603]: E0930 20:04:33.599730 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 20:04:33 crc kubenswrapper[4603]: E0930 20:04:33.599776 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:04:35.599758219 +0000 UTC m=+1077.538217037 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.678805 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-njdfm"
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.804052 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config\") pod \"f7df3627-025d-4dd7-b461-0aa2e77a334a\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") "
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.804113 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc\") pod \"f7df3627-025d-4dd7-b461-0aa2e77a334a\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") "
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.804155 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhwd2\" (UniqueName: \"kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2\") pod \"f7df3627-025d-4dd7-b461-0aa2e77a334a\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") "
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.804206 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb\") pod \"f7df3627-025d-4dd7-b461-0aa2e77a334a\" (UID: \"f7df3627-025d-4dd7-b461-0aa2e77a334a\") "
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.810246 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2" (OuterVolumeSpecName: "kube-api-access-fhwd2") pod "f7df3627-025d-4dd7-b461-0aa2e77a334a" (UID: "f7df3627-025d-4dd7-b461-0aa2e77a334a"). InnerVolumeSpecName "kube-api-access-fhwd2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.824396 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f7df3627-025d-4dd7-b461-0aa2e77a334a" (UID: "f7df3627-025d-4dd7-b461-0aa2e77a334a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.830328 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f7df3627-025d-4dd7-b461-0aa2e77a334a" (UID: "f7df3627-025d-4dd7-b461-0aa2e77a334a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.838808 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config" (OuterVolumeSpecName: "config") pod "f7df3627-025d-4dd7-b461-0aa2e77a334a" (UID: "f7df3627-025d-4dd7-b461-0aa2e77a334a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.906379 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.906410 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhwd2\" (UniqueName: \"kubernetes.io/projected/f7df3627-025d-4dd7-b461-0aa2e77a334a-kube-api-access-fhwd2\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.906422 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:33 crc kubenswrapper[4603]: I0930 20:04:33.906433 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7df3627-025d-4dd7-b461-0aa2e77a334a-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.321634 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f5abf540-1fba-4b2e-83c6-4be4e500f153","Type":"ContainerStarted","Data":"da81ebcaa8edffd2d12cac4c46b0dcdb8d634fbb6dbbe863366907e5302f76d7"}
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.325546 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" event={"ID":"b7487171-f64a-433c-b167-e757a12c60d6","Type":"ContainerStarted","Data":"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"}
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.325676 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.327452 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-njdfm" event={"ID":"f7df3627-025d-4dd7-b461-0aa2e77a334a","Type":"ContainerDied","Data":"1f1243f1adca0ca5231616ba2b0a0f5898376c8193ce296249f9e1eba909487c"}
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.327491 4603 scope.go:117] "RemoveContainer" containerID="0ed8e7b5c58d1a0993b37e47b1492d136a853afda214c1fa6ffd39d1f823e185"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.327594 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-njdfm"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.359498 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" podStartSLOduration=3.359479012 podStartE2EDuration="3.359479012s" podCreationTimestamp="2025-09-30 20:04:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:04:34.351658572 +0000 UTC m=+1076.290117390" watchObservedRunningTime="2025-09-30 20:04:34.359479012 +0000 UTC m=+1076.297937830"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.459140 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"]
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.464422 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-njdfm"]
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.775540 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a92f2676-b428-4bba-9901-161d27ea3ba8" path="/var/lib/kubelet/pods/a92f2676-b428-4bba-9901-161d27ea3ba8/volumes"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.776747 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da2da039-d1e4-45c2-a95e-3a6d4cf6714f" path="/var/lib/kubelet/pods/da2da039-d1e4-45c2-a95e-3a6d4cf6714f/volumes"
Sep 30 20:04:34 crc kubenswrapper[4603]: I0930 20:04:34.777835 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7df3627-025d-4dd7-b461-0aa2e77a334a" path="/var/lib/kubelet/pods/f7df3627-025d-4dd7-b461-0aa2e77a334a/volumes"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.639445 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0"
Sep 30 20:04:35 crc kubenswrapper[4603]: E0930 20:04:35.640473 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 20:04:35 crc kubenswrapper[4603]: E0930 20:04:35.640861 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 20:04:35 crc kubenswrapper[4603]: E0930 20:04:35.640950 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:04:39.640928005 +0000 UTC m=+1081.579386833 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.756301 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-h66ml"]
Sep 30 20:04:35 crc kubenswrapper[4603]: E0930 20:04:35.756683 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7df3627-025d-4dd7-b461-0aa2e77a334a" containerName="init"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.756704 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7df3627-025d-4dd7-b461-0aa2e77a334a" containerName="init"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.756907 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7df3627-025d-4dd7-b461-0aa2e77a334a" containerName="init"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.757583 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.760353 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.761469 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.761714 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.774154 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-h66ml"]
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.842839 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.842917 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.843105 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.843256 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbjmp\" (UniqueName: \"kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.843319 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.843588 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.843719 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.945803 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.945856 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.945888 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.945919 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbjmp\" (UniqueName: \"kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.945948 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.946004 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.946058 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.946866 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.947271 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.947357 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.954100 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.954201 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.954614 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:35 crc kubenswrapper[4603]: I0930 20:04:35.967361 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbjmp\" (UniqueName: \"kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp\") pod \"swift-ring-rebalance-h66ml\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:36 crc kubenswrapper[4603]: I0930 20:04:36.083698 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-h66ml"
Sep 30 20:04:36 crc kubenswrapper[4603]: I0930 20:04:36.342612 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-h66ml"]
Sep 30 20:04:36 crc kubenswrapper[4603]: W0930 20:04:36.355954 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbdec4dd6_c244_40d6_89c3_0644dd9421de.slice/crio-3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236 WatchSource:0}: Error finding container 3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236: Status 404 returned error can't find the container with id 3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236
Sep 30 20:04:36 crc kubenswrapper[4603]: E0930 20:04:36.688398 4603 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.129:56900->38.102.83.129:39427: write tcp 38.102.83.129:56900->38.102.83.129:39427: write: broken pipe
Sep 30 20:04:37 crc kubenswrapper[4603]: I0930 20:04:37.367006 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h66ml" event={"ID":"bdec4dd6-c244-40d6-89c3-0644dd9421de","Type":"ContainerStarted","Data":"3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236"}
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.387377 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.388420 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.443648 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.443734 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.500304 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.720997 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Sep 30 20:04:38 crc kubenswrapper[4603]: I0930 20:04:38.721062 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Sep 30 20:04:39 crc kubenswrapper[4603]: I0930 20:04:39.453298 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Sep 30 20:04:39 crc kubenswrapper[4603]: I0930 20:04:39.718717 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0"
Sep 30 20:04:39 crc kubenswrapper[4603]: E0930 20:04:39.718875 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 20:04:39 crc kubenswrapper[4603]: E0930 20:04:39.718888 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 20:04:39 crc kubenswrapper[4603]: E0930 20:04:39.718930 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:04:47.718916595 +0000 UTC m=+1089.657375413 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found
Sep 30 20:04:40 crc kubenswrapper[4603]: I0930 20:04:40.941554 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd"
Sep 30 20:04:41 crc kubenswrapper[4603]: I0930 20:04:41.697615 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55"
Sep 30 20:04:41 crc kubenswrapper[4603]: I0930 20:04:41.737698 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Sep 30 20:04:41 crc kubenswrapper[4603]: I0930 20:04:41.763501 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"]
Sep 30 20:04:41 crc kubenswrapper[4603]: I0930 20:04:41.763739 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" podUID="2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" containerName="dnsmasq-dns" containerID="cri-o://645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5" gracePeriod=10
Sep 30 20:04:41 crc kubenswrapper[4603]: I0930 20:04:41.837603 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="75e5a99f-1349-4c73-bb51-2f101b8dc2ab" containerName="galera" probeResult="failure" output=<
Sep 30 20:04:41 crc kubenswrapper[4603]: wsrep_local_state_comment (Joined) differs from Synced
Sep 30 20:04:41 crc kubenswrapper[4603]: >
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.321974 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.411939 4603 generic.go:334] "Generic (PLEG): container finished" podID="2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" containerID="645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5" exitCode=0
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.411991 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" event={"ID":"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5","Type":"ContainerDied","Data":"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"}
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.412015 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd" event={"ID":"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5","Type":"ContainerDied","Data":"11e6531f1293fa215c58ff36e4ca4f3af18c1f351a8c89d8ecf25127f6f74c85"}
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.412030 4603 scope.go:117] "RemoveContainer" containerID="645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.412197 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-dc5sd"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.414691 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f5abf540-1fba-4b2e-83c6-4be4e500f153","Type":"ContainerStarted","Data":"7f1ac7595fee1606120a1f29759c21e2d42b0c02bc678938da34e635ca464373"}
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.430757 4603 scope.go:117] "RemoveContainer" containerID="e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.462603 4603 scope.go:117] "RemoveContainer" containerID="645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"
Sep 30 20:04:42 crc kubenswrapper[4603]: E0930 20:04:42.462989 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5\": container with ID starting with 645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5 not found: ID does not exist" containerID="645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.463023 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5"} err="failed to get container status \"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5\": rpc error: code = NotFound desc = could not find container \"645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5\": container with ID starting with 645bdab73d0cddd891d8a195f0e31b7e7b1426eab70b07ae306a77cb6ddf26c5 not found: ID does not exist"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.463048 4603 scope.go:117] "RemoveContainer" containerID="e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7"
Sep 30 20:04:42 crc kubenswrapper[4603]: E0930 20:04:42.463268 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7\": container with ID starting with e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7 not found: ID does not exist" containerID="e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.463297 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7"} err="failed to get container status \"e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7\": rpc error: code = NotFound desc = could not find container \"e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7\": container with ID starting with e33c23a35a4ba149e71d3c32fdd1eac16f7e8768bbd7c99b641359ed97436ed7 not found: ID does not exist"
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.470068 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config\") pod \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") "
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.470121 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb\") pod \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") "
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.470260 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc\") pod \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") "
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.470408 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p76t5\" (UniqueName: \"kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5\") pod \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\" (UID: \"2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5\") "
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.474210 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5" (OuterVolumeSpecName: "kube-api-access-p76t5") pod "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" (UID: "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5"). InnerVolumeSpecName "kube-api-access-p76t5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.510090 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config" (OuterVolumeSpecName: "config") pod "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" (UID: "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.512136 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" (UID: "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.514551 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" (UID: "2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.572154 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.572201 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.572211 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.572221 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p76t5\" (UniqueName: \"kubernetes.io/projected/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5-kube-api-access-p76t5\") on node \"crc\" DevicePath \"\""
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.757186 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"]
Sep 30 20:04:42 crc kubenswrapper[4603]: I0930 20:04:42.776424 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-dc5sd"]
Sep 30 20:04:43 crc kubenswrapper[4603]: I0930 20:04:43.425439 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f5abf540-1fba-4b2e-83c6-4be4e500f153","Type":"ContainerStarted","Data":"212a231b9cfa734da67d2af0efaf01d8fd537d868de1581fe8f44131b186f3c9"}
Sep 30 20:04:43 crc kubenswrapper[4603]: I0930 20:04:43.425688 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Sep 30 20:04:43 crc kubenswrapper[4603]: I0930 20:04:43.448441 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.367379242 podStartE2EDuration="11.448424582s" podCreationTimestamp="2025-09-30 20:04:32 +0000 UTC" firstStartedPulling="2025-09-30 20:04:33.577350839 +0000 UTC m=+1075.515809657" lastFinishedPulling="2025-09-30 20:04:41.658396179 +0000 UTC m=+1083.596854997" observedRunningTime="2025-09-30 20:04:43.443986723 +0000 UTC m=+1085.382445541" watchObservedRunningTime="2025-09-30 20:04:43.448424582 +0000 UTC m=+1085.386883400"
Sep 30 20:04:44 crc kubenswrapper[4603]: I0930 20:04:44.777873 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" path="/var/lib/kubelet/pods/2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5/volumes"
Sep 30 20:04:46 crc kubenswrapper[4603]: I0930 20:04:46.455778 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h66ml" event={"ID":"bdec4dd6-c244-40d6-89c3-0644dd9421de","Type":"ContainerStarted","Data":"84fccc18c09cde4da91b0dbe74a34b6f82e89dabb28e18a000ec0f6e55a3b1b6"}
Sep 30 20:04:46 crc kubenswrapper[4603]: I0930 20:04:46.490538 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-h66ml" podStartSLOduration=2.597405086 podStartE2EDuration="11.490520342s" podCreationTimestamp="2025-09-30 20:04:35 +0000 UTC" firstStartedPulling="2025-09-30 20:04:36.359638921 +0000 UTC m=+1078.298097739" lastFinishedPulling="2025-09-30 20:04:45.252754167 +0000 UTC m=+1087.191212995" observedRunningTime="2025-09-30 20:04:46.487714027 +0000 UTC m=+1088.426172845" watchObservedRunningTime="2025-09-30 20:04:46.490520342 +0000 UTC m=+1088.428979160"
Sep 30 20:04:47 crc kubenswrapper[4603]: I0930 20:04:47.764487 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0"
Sep 30 20:04:47 crc kubenswrapper[4603]: E0930 20:04:47.764763 4603 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 30 20:04:47 crc kubenswrapper[4603]: E0930 20:04:47.764796 4603 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 30 20:04:47 crc kubenswrapper[4603]: E0930 20:04:47.764886 4603 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift podName:c71b4eca-ba52-40ee-88e6-f0b50794825d nodeName:}" failed. No retries permitted until 2025-09-30 20:05:03.764847155 +0000 UTC m=+1105.703305973 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift") pod "swift-storage-0" (UID: "c71b4eca-ba52-40ee-88e6-f0b50794825d") : configmap "swift-ring-files" not found
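The four etc-swift mount failures above (20:04:33, 20:04:35, 20:04:39, 20:04:47) arrive with a doubling retry delay of 2s, 4s, 8s, and 16s, each blocked on the swift-ring-files ConfigMap that is presumably produced by the swift-ring-rebalance-h66ml job seen starting earlier. A minimal sketch of that schedule, assuming only the 2s initial delay and 2x growth observed here; the real bookkeeping, including its cap, lives in kubelet's nestedpendingoperations and is not reproduced:

    package main

    import (
        "fmt"
        "time"
    )

    // Sketch of the doubling retry delay visible in the etc-swift entries.
    // Assumptions: 2s starting delay and a 2x factor, read off this log.
    func main() {
        delay := 2 * time.Second
        next := time.Date(2025, time.September, 30, 20, 4, 33, 0, time.UTC)
        for attempt := 1; attempt <= 4; attempt++ {
            next = next.Add(delay)
            fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
                attempt, next.Format("15:04:05"), delay)
            delay *= 2
        }
    }

Run as written, this prints retry deadlines of 20:04:35, 20:04:39, 20:04:47, and 20:05:03, matching the four log entries to the second.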
"RemoveStaleState removing state" podUID="2bae083c-913f-4ef8-a71f-0cd0d6bc1bb5" containerName="dnsmasq-dns" Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.751748 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.775475 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9wjnk"] Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.882485 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thqld\" (UniqueName: \"kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld\") pod \"keystone-db-create-9wjnk\" (UID: \"bb23a0b8-7639-452d-8c51-07b383706c82\") " pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.984294 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thqld\" (UniqueName: \"kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld\") pod \"keystone-db-create-9wjnk\" (UID: \"bb23a0b8-7639-452d-8c51-07b383706c82\") " pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.988900 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.997354 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-fswsp"] Sep 30 20:04:48 crc kubenswrapper[4603]: I0930 20:04:48.998261 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fswsp" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.012481 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fswsp"] Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.015524 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thqld\" (UniqueName: \"kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld\") pod \"keystone-db-create-9wjnk\" (UID: \"bb23a0b8-7639-452d-8c51-07b383706c82\") " pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.067625 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.087827 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp7s7\" (UniqueName: \"kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7\") pod \"placement-db-create-fswsp\" (UID: \"89aa30a7-1e3a-4413-921b-7cf759e4396a\") " pod="openstack/placement-db-create-fswsp" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.190644 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp7s7\" (UniqueName: \"kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7\") pod \"placement-db-create-fswsp\" (UID: \"89aa30a7-1e3a-4413-921b-7cf759e4396a\") " pod="openstack/placement-db-create-fswsp" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.216007 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp7s7\" (UniqueName: \"kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7\") pod \"placement-db-create-fswsp\" (UID: \"89aa30a7-1e3a-4413-921b-7cf759e4396a\") " pod="openstack/placement-db-create-fswsp" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.307468 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-9rbkg"] Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.308426 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.323504 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fswsp" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.350626 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-9rbkg"] Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.398845 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bggqs\" (UniqueName: \"kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs\") pod \"glance-db-create-9rbkg\" (UID: \"c78450b9-5e4b-4193-b593-a3d4d19f644a\") " pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.501367 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bggqs\" (UniqueName: \"kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs\") pod \"glance-db-create-9rbkg\" (UID: \"c78450b9-5e4b-4193-b593-a3d4d19f644a\") " pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.505859 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"362ae718-7c2c-48c2-9710-bc3731aa6de8","Type":"ContainerStarted","Data":"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df"} Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.506760 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.523788 4603 generic.go:334] "Generic (PLEG): container finished" podID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerID="19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3" exitCode=0 Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.523832 4603 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerDied","Data":"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3"} Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.567661 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bggqs\" (UniqueName: \"kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs\") pod \"glance-db-create-9rbkg\" (UID: \"c78450b9-5e4b-4193-b593-a3d4d19f644a\") " pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.592646 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.09636419 podStartE2EDuration="55.59262559s" podCreationTimestamp="2025-09-30 20:03:54 +0000 UTC" firstStartedPulling="2025-09-30 20:04:03.110259814 +0000 UTC m=+1045.048718632" lastFinishedPulling="2025-09-30 20:04:15.606521214 +0000 UTC m=+1057.544980032" observedRunningTime="2025-09-30 20:04:49.54782228 +0000 UTC m=+1091.486281098" watchObservedRunningTime="2025-09-30 20:04:49.59262559 +0000 UTC m=+1091.531084408" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.636943 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9wjnk"] Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.662319 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:49 crc kubenswrapper[4603]: I0930 20:04:49.877466 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fswsp"] Sep 30 20:04:49 crc kubenswrapper[4603]: W0930 20:04:49.886446 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89aa30a7_1e3a_4413_921b_7cf759e4396a.slice/crio-0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f WatchSource:0}: Error finding container 0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f: Status 404 returned error can't find the container with id 0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.155862 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-9rbkg"] Sep 30 20:04:50 crc kubenswrapper[4603]: W0930 20:04:50.157435 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc78450b9_5e4b_4193_b593_a3d4d19f644a.slice/crio-c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2 WatchSource:0}: Error finding container c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2: Status 404 returned error can't find the container with id c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2 Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.536357 4603 generic.go:334] "Generic (PLEG): container finished" podID="bb23a0b8-7639-452d-8c51-07b383706c82" containerID="2e7751a8c6b3c137f2a4a924250b8e90584210c76255eedd69a0a56e29d0e60d" exitCode=0 Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.536416 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9wjnk" event={"ID":"bb23a0b8-7639-452d-8c51-07b383706c82","Type":"ContainerDied","Data":"2e7751a8c6b3c137f2a4a924250b8e90584210c76255eedd69a0a56e29d0e60d"} Sep 30 20:04:50 
crc kubenswrapper[4603]: I0930 20:04:50.536808 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9wjnk" event={"ID":"bb23a0b8-7639-452d-8c51-07b383706c82","Type":"ContainerStarted","Data":"9a2a065ff3ceeb6f7b9d1f089da33fb3fb171e84cec1c4c5f7f10ea1dbe388b7"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.538770 4603 generic.go:334] "Generic (PLEG): container finished" podID="c78450b9-5e4b-4193-b593-a3d4d19f644a" containerID="479e2006b025369985ea9ae04e36043299cae4ad887e69d513c56e1c3dd76224" exitCode=0 Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.538909 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9rbkg" event={"ID":"c78450b9-5e4b-4193-b593-a3d4d19f644a","Type":"ContainerDied","Data":"479e2006b025369985ea9ae04e36043299cae4ad887e69d513c56e1c3dd76224"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.538989 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9rbkg" event={"ID":"c78450b9-5e4b-4193-b593-a3d4d19f644a","Type":"ContainerStarted","Data":"c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.541219 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerStarted","Data":"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.541385 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.542446 4603 generic.go:334] "Generic (PLEG): container finished" podID="89aa30a7-1e3a-4413-921b-7cf759e4396a" containerID="2091078eb8571c9e7f9ed81d4647dfbcf7256a20de91c5638b8431017b65934c" exitCode=0 Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.542498 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fswsp" event={"ID":"89aa30a7-1e3a-4413-921b-7cf759e4396a","Type":"ContainerDied","Data":"2091078eb8571c9e7f9ed81d4647dfbcf7256a20de91c5638b8431017b65934c"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.542627 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fswsp" event={"ID":"89aa30a7-1e3a-4413-921b-7cf759e4396a","Type":"ContainerStarted","Data":"0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f"} Sep 30 20:04:50 crc kubenswrapper[4603]: I0930 20:04:50.588387 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.300710756 podStartE2EDuration="56.588370903s" podCreationTimestamp="2025-09-30 20:03:54 +0000 UTC" firstStartedPulling="2025-09-30 20:03:56.296005295 +0000 UTC m=+1038.234464123" lastFinishedPulling="2025-09-30 20:04:15.583665452 +0000 UTC m=+1057.522124270" observedRunningTime="2025-09-30 20:04:50.586644317 +0000 UTC m=+1092.525103135" watchObservedRunningTime="2025-09-30 20:04:50.588370903 +0000 UTC m=+1092.526829721" Sep 30 20:04:51 crc kubenswrapper[4603]: I0930 20:04:51.903469 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.041882 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thqld\" (UniqueName: \"kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld\") pod \"bb23a0b8-7639-452d-8c51-07b383706c82\" (UID: \"bb23a0b8-7639-452d-8c51-07b383706c82\") " Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.047046 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld" (OuterVolumeSpecName: "kube-api-access-thqld") pod "bb23a0b8-7639-452d-8c51-07b383706c82" (UID: "bb23a0b8-7639-452d-8c51-07b383706c82"). InnerVolumeSpecName "kube-api-access-thqld". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.114909 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fswsp" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.123204 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.173321 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thqld\" (UniqueName: \"kubernetes.io/projected/bb23a0b8-7639-452d-8c51-07b383706c82-kube-api-access-thqld\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.274499 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp7s7\" (UniqueName: \"kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7\") pod \"89aa30a7-1e3a-4413-921b-7cf759e4396a\" (UID: \"89aa30a7-1e3a-4413-921b-7cf759e4396a\") " Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.274713 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bggqs\" (UniqueName: \"kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs\") pod \"c78450b9-5e4b-4193-b593-a3d4d19f644a\" (UID: \"c78450b9-5e4b-4193-b593-a3d4d19f644a\") " Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.290947 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7" (OuterVolumeSpecName: "kube-api-access-kp7s7") pod "89aa30a7-1e3a-4413-921b-7cf759e4396a" (UID: "89aa30a7-1e3a-4413-921b-7cf759e4396a"). InnerVolumeSpecName "kube-api-access-kp7s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.291027 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs" (OuterVolumeSpecName: "kube-api-access-bggqs") pod "c78450b9-5e4b-4193-b593-a3d4d19f644a" (UID: "c78450b9-5e4b-4193-b593-a3d4d19f644a"). InnerVolumeSpecName "kube-api-access-bggqs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.376998 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bggqs\" (UniqueName: \"kubernetes.io/projected/c78450b9-5e4b-4193-b593-a3d4d19f644a-kube-api-access-bggqs\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.377321 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp7s7\" (UniqueName: \"kubernetes.io/projected/89aa30a7-1e3a-4413-921b-7cf759e4396a-kube-api-access-kp7s7\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.558344 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fswsp" event={"ID":"89aa30a7-1e3a-4413-921b-7cf759e4396a","Type":"ContainerDied","Data":"0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f"} Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.558382 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0af89077bd0b3672fd9d4a5e4319975343fcb36bbfccee03b389d617f519413f" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.558432 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fswsp" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.570076 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9wjnk" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.570101 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9wjnk" event={"ID":"bb23a0b8-7639-452d-8c51-07b383706c82","Type":"ContainerDied","Data":"9a2a065ff3ceeb6f7b9d1f089da33fb3fb171e84cec1c4c5f7f10ea1dbe388b7"} Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.570130 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a2a065ff3ceeb6f7b9d1f089da33fb3fb171e84cec1c4c5f7f10ea1dbe388b7" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.573421 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-9rbkg" event={"ID":"c78450b9-5e4b-4193-b593-a3d4d19f644a","Type":"ContainerDied","Data":"c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2"} Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.573450 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c98bf056d5f75617e4aa77d78ced8957ba4e17d18b6a65f4e6b58f6f0bee4bc2" Sep 30 20:04:52 crc kubenswrapper[4603]: I0930 20:04:52.573496 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-9rbkg" Sep 30 20:04:53 crc kubenswrapper[4603]: I0930 20:04:53.008070 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 20:04:53 crc kubenswrapper[4603]: I0930 20:04:53.580753 4603 generic.go:334] "Generic (PLEG): container finished" podID="bdec4dd6-c244-40d6-89c3-0644dd9421de" containerID="84fccc18c09cde4da91b0dbe74a34b6f82e89dabb28e18a000ec0f6e55a3b1b6" exitCode=0 Sep 30 20:04:53 crc kubenswrapper[4603]: I0930 20:04:53.580796 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h66ml" event={"ID":"bdec4dd6-c244-40d6-89c3-0644dd9421de","Type":"ContainerDied","Data":"84fccc18c09cde4da91b0dbe74a34b6f82e89dabb28e18a000ec0f6e55a3b1b6"} Sep 30 20:04:54 crc kubenswrapper[4603]: I0930 20:04:54.056582 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9hpwx" podUID="2911bc12-77af-4d68-858f-28d3cc2e263e" containerName="ovn-controller" probeResult="failure" output=< Sep 30 20:04:54 crc kubenswrapper[4603]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 20:04:54 crc kubenswrapper[4603]: > Sep 30 20:04:54 crc kubenswrapper[4603]: I0930 20:04:54.933717 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-h66ml" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.118715 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbjmp\" (UniqueName: \"kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.118833 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.118875 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.118958 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.118982 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.119058 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 
20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.119103 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts\") pod \"bdec4dd6-c244-40d6-89c3-0644dd9421de\" (UID: \"bdec4dd6-c244-40d6-89c3-0644dd9421de\") " Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.119999 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.120398 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.127279 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp" (OuterVolumeSpecName: "kube-api-access-jbjmp") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "kube-api-access-jbjmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.130583 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.148493 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts" (OuterVolumeSpecName: "scripts") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.168973 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.193362 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "bdec4dd6-c244-40d6-89c3-0644dd9421de" (UID: "bdec4dd6-c244-40d6-89c3-0644dd9421de"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.221909 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbjmp\" (UniqueName: \"kubernetes.io/projected/bdec4dd6-c244-40d6-89c3-0644dd9421de-kube-api-access-jbjmp\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.221952 4603 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/bdec4dd6-c244-40d6-89c3-0644dd9421de-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.221966 4603 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.221977 4603 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.221989 4603 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.222001 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdec4dd6-c244-40d6-89c3-0644dd9421de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.222012 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdec4dd6-c244-40d6-89c3-0644dd9421de-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.620825 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h66ml" event={"ID":"bdec4dd6-c244-40d6-89c3-0644dd9421de","Type":"ContainerDied","Data":"3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236"} Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.621110 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cbb3441b6658d0172791a5454394c73d652bbc37f4436c5c2aa51bec6824236" Sep 30 20:04:55 crc kubenswrapper[4603]: I0930 20:04:55.621203 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-h66ml" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.812087 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-90e2-account-create-782q5"] Sep 30 20:04:58 crc kubenswrapper[4603]: E0930 20:04:58.814378 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78450b9-5e4b-4193-b593-a3d4d19f644a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.814556 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78450b9-5e4b-4193-b593-a3d4d19f644a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: E0930 20:04:58.814643 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb23a0b8-7639-452d-8c51-07b383706c82" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.814708 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb23a0b8-7639-452d-8c51-07b383706c82" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: E0930 20:04:58.814769 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89aa30a7-1e3a-4413-921b-7cf759e4396a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.814839 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="89aa30a7-1e3a-4413-921b-7cf759e4396a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: E0930 20:04:58.814914 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdec4dd6-c244-40d6-89c3-0644dd9421de" containerName="swift-ring-rebalance" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.814967 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdec4dd6-c244-40d6-89c3-0644dd9421de" containerName="swift-ring-rebalance" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.815202 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="89aa30a7-1e3a-4413-921b-7cf759e4396a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.815278 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c78450b9-5e4b-4193-b593-a3d4d19f644a" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.815339 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdec4dd6-c244-40d6-89c3-0644dd9421de" containerName="swift-ring-rebalance" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.815410 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb23a0b8-7639-452d-8c51-07b383706c82" containerName="mariadb-database-create" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.815982 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.818473 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.831353 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-90e2-account-create-782q5"] Sep 30 20:04:58 crc kubenswrapper[4603]: I0930 20:04:58.986605 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcbrq\" (UniqueName: \"kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq\") pod \"keystone-90e2-account-create-782q5\" (UID: \"877f3d6c-8f86-4b20-b6c1-0fffb1668924\") " pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.063181 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9hpwx" podUID="2911bc12-77af-4d68-858f-28d3cc2e263e" containerName="ovn-controller" probeResult="failure" output=< Sep 30 20:04:59 crc kubenswrapper[4603]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 20:04:59 crc kubenswrapper[4603]: > Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.088486 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcbrq\" (UniqueName: \"kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq\") pod \"keystone-90e2-account-create-782q5\" (UID: \"877f3d6c-8f86-4b20-b6c1-0fffb1668924\") " pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.114152 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcbrq\" (UniqueName: \"kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq\") pod \"keystone-90e2-account-create-782q5\" (UID: \"877f3d6c-8f86-4b20-b6c1-0fffb1668924\") " pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.133070 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.135693 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-f373-account-create-gp9pr"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.136884 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.140994 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.168651 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f373-account-create-gp9pr"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.194447 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.214768 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bjlrh" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.290889 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb628\" (UniqueName: \"kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628\") pod \"placement-f373-account-create-gp9pr\" (UID: \"05206bfc-d8c0-4584-99d7-e6db6a05c14e\") " pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.392157 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb628\" (UniqueName: \"kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628\") pod \"placement-f373-account-create-gp9pr\" (UID: \"05206bfc-d8c0-4584-99d7-e6db6a05c14e\") " pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.434425 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb628\" (UniqueName: \"kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628\") pod \"placement-f373-account-create-gp9pr\" (UID: \"05206bfc-d8c0-4584-99d7-e6db6a05c14e\") " pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.512580 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9hpwx-config-vnv25"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.515520 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.521050 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.541058 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.544466 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9hpwx-config-vnv25"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.560240 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-f8e9-account-create-7pvts"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.563236 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.569928 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.572222 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f8e9-account-create-7pvts"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.707074 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-90e2-account-create-782q5"] Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708066 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708190 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708314 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708424 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55k92\" (UniqueName: \"kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92\") pod \"glance-f8e9-account-create-7pvts\" (UID: \"10f773b8-d62b-4ef7-94c6-3c264a0428c0\") " pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708531 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftnnj\" (UniqueName: \"kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708608 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.708696 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: W0930 
20:04:59.714420 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod877f3d6c_8f86_4b20_b6c1_0fffb1668924.slice/crio-290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42 WatchSource:0}: Error finding container 290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42: Status 404 returned error can't find the container with id 290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42 Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.809648 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.809919 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.809968 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810010 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55k92\" (UniqueName: \"kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92\") pod \"glance-f8e9-account-create-7pvts\" (UID: \"10f773b8-d62b-4ef7-94c6-3c264a0428c0\") " pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810069 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftnnj\" (UniqueName: \"kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810087 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810145 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810443 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn\") pod 
\"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810653 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810701 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.810759 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.812878 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.832847 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55k92\" (UniqueName: \"kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92\") pod \"glance-f8e9-account-create-7pvts\" (UID: \"10f773b8-d62b-4ef7-94c6-3c264a0428c0\") " pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.836430 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftnnj\" (UniqueName: \"kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj\") pod \"ovn-controller-9hpwx-config-vnv25\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.894745 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:04:59 crc kubenswrapper[4603]: I0930 20:04:59.905855 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.265605 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f373-account-create-gp9pr"] Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.340674 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9hpwx-config-vnv25"] Sep 30 20:05:00 crc kubenswrapper[4603]: W0930 20:05:00.408840 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19f86e18_c543_4565_90a8_e0e6def8826f.slice/crio-0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e WatchSource:0}: Error finding container 0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e: Status 404 returned error can't find the container with id 0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.498097 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f8e9-account-create-7pvts"] Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.677324 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9hpwx-config-vnv25" event={"ID":"19f86e18-c543-4565-90a8-e0e6def8826f","Type":"ContainerStarted","Data":"0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e"} Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.679450 4603 generic.go:334] "Generic (PLEG): container finished" podID="877f3d6c-8f86-4b20-b6c1-0fffb1668924" containerID="8e3dd8175de389fc27b0ee35ee462aa0e6b8558a05cb30f503c979cecc90f946" exitCode=0 Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.679505 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-90e2-account-create-782q5" event={"ID":"877f3d6c-8f86-4b20-b6c1-0fffb1668924","Type":"ContainerDied","Data":"8e3dd8175de389fc27b0ee35ee462aa0e6b8558a05cb30f503c979cecc90f946"} Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.679523 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-90e2-account-create-782q5" event={"ID":"877f3d6c-8f86-4b20-b6c1-0fffb1668924","Type":"ContainerStarted","Data":"290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42"} Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.680843 4603 generic.go:334] "Generic (PLEG): container finished" podID="05206bfc-d8c0-4584-99d7-e6db6a05c14e" containerID="b623828695c337f7af1c8501565cced4a898d473cfde8f3bdcbf3ddfe9e68048" exitCode=0 Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.680904 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f373-account-create-gp9pr" event={"ID":"05206bfc-d8c0-4584-99d7-e6db6a05c14e","Type":"ContainerDied","Data":"b623828695c337f7af1c8501565cced4a898d473cfde8f3bdcbf3ddfe9e68048"} Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.680927 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f373-account-create-gp9pr" event={"ID":"05206bfc-d8c0-4584-99d7-e6db6a05c14e","Type":"ContainerStarted","Data":"10151c352d58d95abd32fb819212487df6cd68de4ebfbd98715b05380e3c8b60"} Sep 30 20:05:00 crc kubenswrapper[4603]: I0930 20:05:00.681980 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8e9-account-create-7pvts" event={"ID":"10f773b8-d62b-4ef7-94c6-3c264a0428c0","Type":"ContainerStarted","Data":"8b3773dfce9caa7709c13e18b5dba1610bff4a0ac8f5684a7a3985d5b863ad24"} 
Sep 30 20:05:01 crc kubenswrapper[4603]: I0930 20:05:01.696204 4603 generic.go:334] "Generic (PLEG): container finished" podID="10f773b8-d62b-4ef7-94c6-3c264a0428c0" containerID="8ea5bd3f41ee547f776496ed2d7f22bc68225392c89bd8691d53d592eae101c2" exitCode=0 Sep 30 20:05:01 crc kubenswrapper[4603]: I0930 20:05:01.696357 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8e9-account-create-7pvts" event={"ID":"10f773b8-d62b-4ef7-94c6-3c264a0428c0","Type":"ContainerDied","Data":"8ea5bd3f41ee547f776496ed2d7f22bc68225392c89bd8691d53d592eae101c2"} Sep 30 20:05:01 crc kubenswrapper[4603]: I0930 20:05:01.702313 4603 generic.go:334] "Generic (PLEG): container finished" podID="19f86e18-c543-4565-90a8-e0e6def8826f" containerID="8eb42c67af21f2f98dd885ec9c009633d97762be66d4eafea9795211261763ac" exitCode=0 Sep 30 20:05:01 crc kubenswrapper[4603]: I0930 20:05:01.702607 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9hpwx-config-vnv25" event={"ID":"19f86e18-c543-4565-90a8-e0e6def8826f","Type":"ContainerDied","Data":"8eb42c67af21f2f98dd885ec9c009633d97762be66d4eafea9795211261763ac"} Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.067437 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.075015 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.154352 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcbrq\" (UniqueName: \"kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq\") pod \"877f3d6c-8f86-4b20-b6c1-0fffb1668924\" (UID: \"877f3d6c-8f86-4b20-b6c1-0fffb1668924\") " Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.155295 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb628\" (UniqueName: \"kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628\") pod \"05206bfc-d8c0-4584-99d7-e6db6a05c14e\" (UID: \"05206bfc-d8c0-4584-99d7-e6db6a05c14e\") " Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.160953 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq" (OuterVolumeSpecName: "kube-api-access-xcbrq") pod "877f3d6c-8f86-4b20-b6c1-0fffb1668924" (UID: "877f3d6c-8f86-4b20-b6c1-0fffb1668924"). InnerVolumeSpecName "kube-api-access-xcbrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.161109 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628" (OuterVolumeSpecName: "kube-api-access-zb628") pod "05206bfc-d8c0-4584-99d7-e6db6a05c14e" (UID: "05206bfc-d8c0-4584-99d7-e6db6a05c14e"). InnerVolumeSpecName "kube-api-access-zb628". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.257245 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb628\" (UniqueName: \"kubernetes.io/projected/05206bfc-d8c0-4584-99d7-e6db6a05c14e-kube-api-access-zb628\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.257282 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcbrq\" (UniqueName: \"kubernetes.io/projected/877f3d6c-8f86-4b20-b6c1-0fffb1668924-kube-api-access-xcbrq\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.716508 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-90e2-account-create-782q5" event={"ID":"877f3d6c-8f86-4b20-b6c1-0fffb1668924","Type":"ContainerDied","Data":"290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42"} Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.716552 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="290fa009ca0aaaaa45f5f1338f787b9a631c5c22ffff72716af544ae2e40bc42" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.716614 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-90e2-account-create-782q5" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.720370 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f373-account-create-gp9pr" event={"ID":"05206bfc-d8c0-4584-99d7-e6db6a05c14e","Type":"ContainerDied","Data":"10151c352d58d95abd32fb819212487df6cd68de4ebfbd98715b05380e3c8b60"} Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.720440 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10151c352d58d95abd32fb819212487df6cd68de4ebfbd98715b05380e3c8b60" Sep 30 20:05:02 crc kubenswrapper[4603]: I0930 20:05:02.720913 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f373-account-create-gp9pr" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.133604 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.140258 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.179701 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.179769 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.179800 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.179843 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftnnj\" (UniqueName: \"kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.179987 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180019 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55k92\" (UniqueName: \"kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92\") pod \"10f773b8-d62b-4ef7-94c6-3c264a0428c0\" (UID: \"10f773b8-d62b-4ef7-94c6-3c264a0428c0\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180055 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts\") pod \"19f86e18-c543-4565-90a8-e0e6def8826f\" (UID: \"19f86e18-c543-4565-90a8-e0e6def8826f\") " Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180107 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180137 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180112 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run" (OuterVolumeSpecName: "var-run") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.180892 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181339 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts" (OuterVolumeSpecName: "scripts") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181499 4603 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181521 4603 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181538 4603 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181549 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19f86e18-c543-4565-90a8-e0e6def8826f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.181559 4603 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/19f86e18-c543-4565-90a8-e0e6def8826f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.185867 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj" (OuterVolumeSpecName: "kube-api-access-ftnnj") pod "19f86e18-c543-4565-90a8-e0e6def8826f" (UID: "19f86e18-c543-4565-90a8-e0e6def8826f"). InnerVolumeSpecName "kube-api-access-ftnnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.201337 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92" (OuterVolumeSpecName: "kube-api-access-55k92") pod "10f773b8-d62b-4ef7-94c6-3c264a0428c0" (UID: "10f773b8-d62b-4ef7-94c6-3c264a0428c0"). InnerVolumeSpecName "kube-api-access-55k92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.283734 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55k92\" (UniqueName: \"kubernetes.io/projected/10f773b8-d62b-4ef7-94c6-3c264a0428c0-kube-api-access-55k92\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.283778 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftnnj\" (UniqueName: \"kubernetes.io/projected/19f86e18-c543-4565-90a8-e0e6def8826f-kube-api-access-ftnnj\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.729068 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f8e9-account-create-7pvts" event={"ID":"10f773b8-d62b-4ef7-94c6-3c264a0428c0","Type":"ContainerDied","Data":"8b3773dfce9caa7709c13e18b5dba1610bff4a0ac8f5684a7a3985d5b863ad24"} Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.729107 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b3773dfce9caa7709c13e18b5dba1610bff4a0ac8f5684a7a3985d5b863ad24" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.729185 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f8e9-account-create-7pvts" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.733651 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9hpwx-config-vnv25" event={"ID":"19f86e18-c543-4565-90a8-e0e6def8826f","Type":"ContainerDied","Data":"0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e"} Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.733707 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d45823e0087c942ebcafb0f5f24c4eb46b00f2ae6c3286a80ca56f249adc05e" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.733708 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9hpwx-config-vnv25" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.793314 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.798309 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c71b4eca-ba52-40ee-88e6-f0b50794825d-etc-swift\") pod \"swift-storage-0\" (UID: \"c71b4eca-ba52-40ee-88e6-f0b50794825d\") " pod="openstack/swift-storage-0" Sep 30 20:05:03 crc kubenswrapper[4603]: I0930 20:05:03.987326 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.266529 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-9hpwx" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.305275 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9hpwx-config-vnv25"] Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.319016 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9hpwx-config-vnv25"] Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.790934 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19f86e18-c543-4565-90a8-e0e6def8826f" path="/var/lib/kubelet/pods/19f86e18-c543-4565-90a8-e0e6def8826f/volumes" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.791866 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-gb2nj"] Sep 30 20:05:04 crc kubenswrapper[4603]: E0930 20:05:04.792189 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05206bfc-d8c0-4584-99d7-e6db6a05c14e" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792207 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="05206bfc-d8c0-4584-99d7-e6db6a05c14e" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: E0930 20:05:04.792232 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877f3d6c-8f86-4b20-b6c1-0fffb1668924" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792242 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="877f3d6c-8f86-4b20-b6c1-0fffb1668924" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: E0930 20:05:04.792266 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10f773b8-d62b-4ef7-94c6-3c264a0428c0" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792275 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="10f773b8-d62b-4ef7-94c6-3c264a0428c0" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: E0930 20:05:04.792288 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f86e18-c543-4565-90a8-e0e6def8826f" containerName="ovn-config" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792296 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f86e18-c543-4565-90a8-e0e6def8826f" containerName="ovn-config" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792495 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="10f773b8-d62b-4ef7-94c6-3c264a0428c0" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792532 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="877f3d6c-8f86-4b20-b6c1-0fffb1668924" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792543 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="19f86e18-c543-4565-90a8-e0e6def8826f" containerName="ovn-config" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.792552 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="05206bfc-d8c0-4584-99d7-e6db6a05c14e" containerName="mariadb-account-create" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.793151 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.798571 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-gb2nj"] Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.800506 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-fnjbp" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.802264 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.809291 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dzsm\" (UniqueName: \"kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.809363 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.809451 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.809471 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.877627 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:05:04 crc kubenswrapper[4603]: W0930 20:05:04.886624 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc71b4eca_ba52_40ee_88e6_f0b50794825d.slice/crio-96e603bfc1302eb63b0cf3ae2f5eeba7d14c81993f72e9ed452282f1c87f61d3 WatchSource:0}: Error finding container 96e603bfc1302eb63b0cf3ae2f5eeba7d14c81993f72e9ed452282f1c87f61d3: Status 404 returned error can't find the container with id 96e603bfc1302eb63b0cf3ae2f5eeba7d14c81993f72e9ed452282f1c87f61d3 Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.910010 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.910042 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " 
pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.910083 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dzsm\" (UniqueName: \"kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.910134 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.915092 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.915192 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.917817 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:04 crc kubenswrapper[4603]: I0930 20:05:04.927739 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dzsm\" (UniqueName: \"kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm\") pod \"glance-db-sync-gb2nj\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:05 crc kubenswrapper[4603]: I0930 20:05:05.130763 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-gb2nj" Sep 30 20:05:05 crc kubenswrapper[4603]: I0930 20:05:05.500957 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-gb2nj"] Sep 30 20:05:05 crc kubenswrapper[4603]: W0930 20:05:05.510400 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4255d3b6_48b0_4a39_8991_bd70191f02ee.slice/crio-15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5 WatchSource:0}: Error finding container 15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5: Status 404 returned error can't find the container with id 15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5 Sep 30 20:05:05 crc kubenswrapper[4603]: I0930 20:05:05.756432 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"96e603bfc1302eb63b0cf3ae2f5eeba7d14c81993f72e9ed452282f1c87f61d3"} Sep 30 20:05:05 crc kubenswrapper[4603]: I0930 20:05:05.757206 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-gb2nj" event={"ID":"4255d3b6-48b0-4a39-8991-bd70191f02ee","Type":"ContainerStarted","Data":"15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5"} Sep 30 20:05:05 crc kubenswrapper[4603]: I0930 20:05:05.819382 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.026386 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.148973 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-f2p2m"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.158142 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.180191 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-f2p2m"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.236992 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmkdq\" (UniqueName: \"kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq\") pod \"cinder-db-create-f2p2m\" (UID: \"4d5951d8-7653-4312-978c-f31522c6c282\") " pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.254327 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-swff5"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.260400 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-swff5" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.285503 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-swff5"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.339100 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmkdq\" (UniqueName: \"kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq\") pod \"cinder-db-create-f2p2m\" (UID: \"4d5951d8-7653-4312-978c-f31522c6c282\") " pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.339156 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kj8r\" (UniqueName: \"kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r\") pod \"barbican-db-create-swff5\" (UID: \"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7\") " pod="openstack/barbican-db-create-swff5" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.391309 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmkdq\" (UniqueName: \"kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq\") pod \"cinder-db-create-f2p2m\" (UID: \"4d5951d8-7653-4312-978c-f31522c6c282\") " pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.431080 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-z7jg7"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.432345 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.442707 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7jg7"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.450767 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kj8r\" (UniqueName: \"kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r\") pod \"barbican-db-create-swff5\" (UID: \"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7\") " pod="openstack/barbican-db-create-swff5" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.486769 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kj8r\" (UniqueName: \"kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r\") pod \"barbican-db-create-swff5\" (UID: \"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7\") " pod="openstack/barbican-db-create-swff5" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.499137 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.552190 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7fd4\" (UniqueName: \"kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4\") pod \"neutron-db-create-z7jg7\" (UID: \"799d29f7-1992-4c4a-8f3e-dd18927238a2\") " pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.566635 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-7rprt"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.567802 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.574469 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.574634 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.574796 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hqmtj" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.580553 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7rprt"] Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.582582 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.591284 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-swff5" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.653849 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb8gk\" (UniqueName: \"kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.654227 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.654341 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7fd4\" (UniqueName: \"kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4\") pod \"neutron-db-create-z7jg7\" (UID: \"799d29f7-1992-4c4a-8f3e-dd18927238a2\") " pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.654656 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.672718 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7fd4\" (UniqueName: \"kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4\") pod \"neutron-db-create-z7jg7\" (UID: \"799d29f7-1992-4c4a-8f3e-dd18927238a2\") " pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.751467 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.757219 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb8gk\" (UniqueName: \"kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.757256 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.757320 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.768758 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.773772 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.786038 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb8gk\" (UniqueName: \"kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk\") pod \"keystone-db-sync-7rprt\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.793363 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"157cd1dc562c4616365a025cd47ef3a280ec1cc8077ef435a3bff0749645f5c8"} Sep 30 20:05:06 crc kubenswrapper[4603]: I0930 20:05:06.907561 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.230882 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-swff5"] Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.287103 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-f2p2m"] Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.707511 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z7jg7"] Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.804380 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7jg7" event={"ID":"799d29f7-1992-4c4a-8f3e-dd18927238a2","Type":"ContainerStarted","Data":"3d2d3c30e71e2bc1e41dec27942aca7244b4ce5f16221fa5a03a49c20ffe84a4"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.808396 4603 generic.go:334] "Generic (PLEG): container finished" podID="2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" containerID="9bf80d0bd69f0f7e471f0b5e97d11a9aab5218d47c3051d86703affcc071bcfb" exitCode=0 Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.808445 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-swff5" event={"ID":"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7","Type":"ContainerDied","Data":"9bf80d0bd69f0f7e471f0b5e97d11a9aab5218d47c3051d86703affcc071bcfb"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.808464 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-swff5" event={"ID":"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7","Type":"ContainerStarted","Data":"05590acc474b1726e393d3a841fde94a494c49a155bc464fa6436a899dd25e96"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.813144 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f2p2m" event={"ID":"4d5951d8-7653-4312-978c-f31522c6c282","Type":"ContainerStarted","Data":"23b250af8878a8aa2fc8ed928c780307176aa9f4310608076f6f1831cd7c9592"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.813238 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f2p2m" event={"ID":"4d5951d8-7653-4312-978c-f31522c6c282","Type":"ContainerStarted","Data":"32adc3370babc25f48f6500f2154affb6ec5eee8986eaaf549b82221bc856f6f"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.819540 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"9a2937f456c807fbc1e371634743729e3240fda80e5ee0fc470ac6bf33404433"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.819571 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"2c63d8f026900dad65664102024930644c707484f1d028a360925bb48b7ac464"} Sep 30 20:05:07 crc kubenswrapper[4603]: I0930 20:05:07.879555 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-f2p2m" podStartSLOduration=1.8795396549999999 podStartE2EDuration="1.879539655s" podCreationTimestamp="2025-09-30 20:05:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:05:07.867621199 +0000 UTC m=+1109.806080017" watchObservedRunningTime="2025-09-30 20:05:07.879539655 +0000 UTC m=+1109.817998473" Sep 30 20:05:07 crc 
kubenswrapper[4603]: I0930 20:05:07.881536 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7rprt"] Sep 30 20:05:07 crc kubenswrapper[4603]: W0930 20:05:07.891883 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb221670d_0e9e_427c_845c_7c00c4566e64.slice/crio-3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783 WatchSource:0}: Error finding container 3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783: Status 404 returned error can't find the container with id 3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783 Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.441218 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.441617 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.441657 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.442331 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.442381 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7" gracePeriod=600 Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.836752 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7rprt" event={"ID":"b221670d-0e9e-427c-845c-7c00c4566e64","Type":"ContainerStarted","Data":"3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783"} Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.839565 4603 generic.go:334] "Generic (PLEG): container finished" podID="799d29f7-1992-4c4a-8f3e-dd18927238a2" containerID="bfaedb4545f6258ff662427fa9480cf705af306ace442c65872a6f86c7e3b73d" exitCode=0 Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.839765 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7jg7" event={"ID":"799d29f7-1992-4c4a-8f3e-dd18927238a2","Type":"ContainerDied","Data":"bfaedb4545f6258ff662427fa9480cf705af306ace442c65872a6f86c7e3b73d"} Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.844837 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" 
containerID="dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7" exitCode=0 Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.845073 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7"} Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.845193 4603 scope.go:117] "RemoveContainer" containerID="a1bce1f259d9f06bdbadbe95e665ce32efb4a05154338c1705f7aea13c75ef51" Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.859996 4603 generic.go:334] "Generic (PLEG): container finished" podID="4d5951d8-7653-4312-978c-f31522c6c282" containerID="23b250af8878a8aa2fc8ed928c780307176aa9f4310608076f6f1831cd7c9592" exitCode=0 Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.860047 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f2p2m" event={"ID":"4d5951d8-7653-4312-978c-f31522c6c282","Type":"ContainerDied","Data":"23b250af8878a8aa2fc8ed928c780307176aa9f4310608076f6f1831cd7c9592"} Sep 30 20:05:08 crc kubenswrapper[4603]: I0930 20:05:08.872257 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"5d923a1dc5584e1f2202d6ea3de7080783f12399a331f066628cbd94f9d192f0"} Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.326899 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-swff5" Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.415441 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kj8r\" (UniqueName: \"kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r\") pod \"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7\" (UID: \"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7\") " Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.434319 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r" (OuterVolumeSpecName: "kube-api-access-6kj8r") pod "2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" (UID: "2023d167-8ee7-4bdc-ad0e-763b76c6a1d7"). InnerVolumeSpecName "kube-api-access-6kj8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.517503 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kj8r\" (UniqueName: \"kubernetes.io/projected/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7-kube-api-access-6kj8r\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.887496 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-swff5" Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.887527 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-swff5" event={"ID":"2023d167-8ee7-4bdc-ad0e-763b76c6a1d7","Type":"ContainerDied","Data":"05590acc474b1726e393d3a841fde94a494c49a155bc464fa6436a899dd25e96"} Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.887827 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05590acc474b1726e393d3a841fde94a494c49a155bc464fa6436a899dd25e96" Sep 30 20:05:09 crc kubenswrapper[4603]: I0930 20:05:09.919093 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156"} Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.596078 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.601709 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.641586 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmkdq\" (UniqueName: \"kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq\") pod \"4d5951d8-7653-4312-978c-f31522c6c282\" (UID: \"4d5951d8-7653-4312-978c-f31522c6c282\") " Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.641655 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7fd4\" (UniqueName: \"kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4\") pod \"799d29f7-1992-4c4a-8f3e-dd18927238a2\" (UID: \"799d29f7-1992-4c4a-8f3e-dd18927238a2\") " Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.660891 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4" (OuterVolumeSpecName: "kube-api-access-c7fd4") pod "799d29f7-1992-4c4a-8f3e-dd18927238a2" (UID: "799d29f7-1992-4c4a-8f3e-dd18927238a2"). InnerVolumeSpecName "kube-api-access-c7fd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.663157 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq" (OuterVolumeSpecName: "kube-api-access-wmkdq") pod "4d5951d8-7653-4312-978c-f31522c6c282" (UID: "4d5951d8-7653-4312-978c-f31522c6c282"). InnerVolumeSpecName "kube-api-access-wmkdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.743735 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmkdq\" (UniqueName: \"kubernetes.io/projected/4d5951d8-7653-4312-978c-f31522c6c282-kube-api-access-wmkdq\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.743761 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7fd4\" (UniqueName: \"kubernetes.io/projected/799d29f7-1992-4c4a-8f3e-dd18927238a2-kube-api-access-c7fd4\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.932687 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z7jg7" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.933677 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z7jg7" event={"ID":"799d29f7-1992-4c4a-8f3e-dd18927238a2","Type":"ContainerDied","Data":"3d2d3c30e71e2bc1e41dec27942aca7244b4ce5f16221fa5a03a49c20ffe84a4"} Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.933825 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d2d3c30e71e2bc1e41dec27942aca7244b4ce5f16221fa5a03a49c20ffe84a4" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.938436 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-f2p2m" event={"ID":"4d5951d8-7653-4312-978c-f31522c6c282","Type":"ContainerDied","Data":"32adc3370babc25f48f6500f2154affb6ec5eee8986eaaf549b82221bc856f6f"} Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.938469 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32adc3370babc25f48f6500f2154affb6ec5eee8986eaaf549b82221bc856f6f" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.938526 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-f2p2m" Sep 30 20:05:10 crc kubenswrapper[4603]: I0930 20:05:10.946738 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"96146d4e92cf4b2df9335626281b0267248be2695b5ed47d4af228788e354336"} Sep 30 20:05:11 crc kubenswrapper[4603]: I0930 20:05:11.957972 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"b1f8be4b544bba4241b6419d0f4ca47d5091ddd2d51377c92a8fdd5738acabea"} Sep 30 20:05:11 crc kubenswrapper[4603]: I0930 20:05:11.958421 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"8beab1646c48af2b1d2db1fc64d8924c0e72c1ef29830a78e2e5452be6ca9419"} Sep 30 20:05:11 crc kubenswrapper[4603]: I0930 20:05:11.958438 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"98edcab2ef7ff258817efc980c082a4d78c0cc2a437fa2bc0f00af93d82853c3"} Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.287329 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-df76-account-create-9qz69"] Sep 30 20:05:16 crc kubenswrapper[4603]: E0930 20:05:16.287910 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d5951d8-7653-4312-978c-f31522c6c282" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288154 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d5951d8-7653-4312-978c-f31522c6c282" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: E0930 20:05:16.288187 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288193 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: E0930 20:05:16.288217 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="799d29f7-1992-4c4a-8f3e-dd18927238a2" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288223 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="799d29f7-1992-4c4a-8f3e-dd18927238a2" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288400 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d5951d8-7653-4312-978c-f31522c6c282" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288433 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288463 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="799d29f7-1992-4c4a-8f3e-dd18927238a2" containerName="mariadb-database-create" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.288977 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.292416 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.300651 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-df76-account-create-9qz69"] Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.354820 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz4dg\" (UniqueName: \"kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg\") pod \"barbican-df76-account-create-9qz69\" (UID: \"03487c1c-ba57-440c-a854-1c0238e51bfb\") " pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.456485 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz4dg\" (UniqueName: \"kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg\") pod \"barbican-df76-account-create-9qz69\" (UID: \"03487c1c-ba57-440c-a854-1c0238e51bfb\") " pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.481693 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz4dg\" (UniqueName: \"kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg\") pod \"barbican-df76-account-create-9qz69\" (UID: \"03487c1c-ba57-440c-a854-1c0238e51bfb\") " pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:16 crc kubenswrapper[4603]: I0930 20:05:16.626753 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:23 crc kubenswrapper[4603]: E0930 20:05:23.216818 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-swift-object:current-podified" Sep 30 20:05:23 crc kubenswrapper[4603]: E0930 20:05:23.217541 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:object-server,Image:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,Command:[/usr/bin/swift-object-server /etc/swift/object-server.conf.d -v],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:object,HostPort:0,ContainerPort:6200,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b7h56h9dh94h67bh697h95h55hbh555h556h675h5fdh57dh579h5fbh64fh5c9h687hb6h678h5d4h549h54h98h8ch564h5bh5bch55dhc8hf8q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:swift,ReadOnly:false,MountPath:/srv/node/pv,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cache,ReadOnly:false,MountPath:/var/cache/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:lock,ReadOnly:false,MountPath:/var/lock,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rjtkr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-storage-0_openstack(c71b4eca-ba52-40ee-88e6-f0b50794825d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:05:24 crc kubenswrapper[4603]: E0930 20:05:23.542368 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"object-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"object-replicator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-auditor\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-updater\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"rsync\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"swift-recon-cron\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\"]" pod="openstack/swift-storage-0" podUID="c71b4eca-ba52-40ee-88e6-f0b50794825d" Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.079051 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-gb2nj" event={"ID":"4255d3b6-48b0-4a39-8991-bd70191f02ee","Type":"ContainerStarted","Data":"fff0e3ddc8c18a9925865d4dfd15879c17f595cdf49f1421d2851e4e0734e809"} Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.092522 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7rprt" event={"ID":"b221670d-0e9e-427c-845c-7c00c4566e64","Type":"ContainerStarted","Data":"f7081fc457ee52b17531bbe0dfc3ae102d4c46a12f943a21c15e29f118e0ab72"} Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.112303 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-gb2nj" podStartSLOduration=2.331503912 podStartE2EDuration="20.11228411s" podCreationTimestamp="2025-09-30 20:05:04 +0000 UTC" firstStartedPulling="2025-09-30 20:05:05.512917661 +0000 UTC m=+1107.451376479" lastFinishedPulling="2025-09-30 20:05:23.293697859 +0000 UTC m=+1125.232156677" observedRunningTime="2025-09-30 20:05:24.103977939 +0000 UTC m=+1126.042436757" watchObservedRunningTime="2025-09-30 20:05:24.11228411 +0000 UTC m=+1126.050742948" Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.112586 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"f42dd371f14c5050ddf8ead5195499813134dd77a76b96d15f0d2be51157149b"} Sep 30 20:05:24 crc kubenswrapper[4603]: E0930 20:05:24.118705 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"object-server\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-replicator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-auditor\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-updater\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"rsync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"swift-recon-cron\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\"]" pod="openstack/swift-storage-0" podUID="c71b4eca-ba52-40ee-88e6-f0b50794825d" Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.132663 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-7rprt" podStartSLOduration=2.763286997 podStartE2EDuration="18.132644571s" podCreationTimestamp="2025-09-30 20:05:06 +0000 UTC" firstStartedPulling="2025-09-30 20:05:07.895279943 +0000 UTC 
m=+1109.833738761" lastFinishedPulling="2025-09-30 20:05:23.264637497 +0000 UTC m=+1125.203096335" observedRunningTime="2025-09-30 20:05:24.127484933 +0000 UTC m=+1126.065943771" watchObservedRunningTime="2025-09-30 20:05:24.132644571 +0000 UTC m=+1126.071103389" Sep 30 20:05:24 crc kubenswrapper[4603]: I0930 20:05:24.378544 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-df76-account-create-9qz69"] Sep 30 20:05:24 crc kubenswrapper[4603]: E0930 20:05:24.991691 4603 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03487c1c_ba57_440c_a854_1c0238e51bfb.slice/crio-b705fe0525130499ba74e114a6078011fd8a5c91df23767b36376d71764e5fc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03487c1c_ba57_440c_a854_1c0238e51bfb.slice/crio-conmon-b705fe0525130499ba74e114a6078011fd8a5c91df23767b36376d71764e5fc4.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:05:25 crc kubenswrapper[4603]: I0930 20:05:25.122891 4603 generic.go:334] "Generic (PLEG): container finished" podID="03487c1c-ba57-440c-a854-1c0238e51bfb" containerID="b705fe0525130499ba74e114a6078011fd8a5c91df23767b36376d71764e5fc4" exitCode=0 Sep 30 20:05:25 crc kubenswrapper[4603]: I0930 20:05:25.122951 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df76-account-create-9qz69" event={"ID":"03487c1c-ba57-440c-a854-1c0238e51bfb","Type":"ContainerDied","Data":"b705fe0525130499ba74e114a6078011fd8a5c91df23767b36376d71764e5fc4"} Sep 30 20:05:25 crc kubenswrapper[4603]: I0930 20:05:25.122986 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df76-account-create-9qz69" event={"ID":"03487c1c-ba57-440c-a854-1c0238e51bfb","Type":"ContainerStarted","Data":"767672dbdd1f5fdb8f363557acb3b710012c9ac7dbca1100e8a7075d5dfb253a"} Sep 30 20:05:25 crc kubenswrapper[4603]: E0930 20:05:25.135810 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"object-server\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-replicator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-auditor\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"object-updater\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"rsync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\", failed to \"StartContainer\" for \"swift-recon-cron\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-object:current-podified\\\"\"]" pod="openstack/swift-storage-0" podUID="c71b4eca-ba52-40ee-88e6-f0b50794825d" Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.345025 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0f43-account-create-cl6t4"] Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.347691 4603 util.go:30] "No 
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.347691 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0f43-account-create-cl6t4"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.350057 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.355951 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0f43-account-create-cl6t4"]
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.451175 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqnkg\" (UniqueName: \"kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg\") pod \"cinder-0f43-account-create-cl6t4\" (UID: \"2c26e760-1c76-4f97-9df8-101bdf01d8ec\") " pod="openstack/cinder-0f43-account-create-cl6t4"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.484878 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-df76-account-create-9qz69"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.514818 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7690-account-create-hfkfp"]
Sep 30 20:05:26 crc kubenswrapper[4603]: E0930 20:05:26.515280 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03487c1c-ba57-440c-a854-1c0238e51bfb" containerName="mariadb-account-create"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.515302 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="03487c1c-ba57-440c-a854-1c0238e51bfb" containerName="mariadb-account-create"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.515460 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="03487c1c-ba57-440c-a854-1c0238e51bfb" containerName="mariadb-account-create"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.516040 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7690-account-create-hfkfp"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.517965 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.525104 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7690-account-create-hfkfp"]
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.553171 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz4dg\" (UniqueName: \"kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg\") pod \"03487c1c-ba57-440c-a854-1c0238e51bfb\" (UID: \"03487c1c-ba57-440c-a854-1c0238e51bfb\") "
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.553519 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js6v6\" (UniqueName: \"kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6\") pod \"neutron-7690-account-create-hfkfp\" (UID: \"cef5d2f6-4ebf-4473-9e7e-87904a61af9b\") " pod="openstack/neutron-7690-account-create-hfkfp"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.553585 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqnkg\" (UniqueName: \"kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg\") pod \"cinder-0f43-account-create-cl6t4\" (UID: \"2c26e760-1c76-4f97-9df8-101bdf01d8ec\") " pod="openstack/cinder-0f43-account-create-cl6t4"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.561848 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg" (OuterVolumeSpecName: "kube-api-access-xz4dg") pod "03487c1c-ba57-440c-a854-1c0238e51bfb" (UID: "03487c1c-ba57-440c-a854-1c0238e51bfb"). InnerVolumeSpecName "kube-api-access-xz4dg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.575207 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqnkg\" (UniqueName: \"kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg\") pod \"cinder-0f43-account-create-cl6t4\" (UID: \"2c26e760-1c76-4f97-9df8-101bdf01d8ec\") " pod="openstack/cinder-0f43-account-create-cl6t4"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.655043 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js6v6\" (UniqueName: \"kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6\") pod \"neutron-7690-account-create-hfkfp\" (UID: \"cef5d2f6-4ebf-4473-9e7e-87904a61af9b\") " pod="openstack/neutron-7690-account-create-hfkfp"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.655416 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz4dg\" (UniqueName: \"kubernetes.io/projected/03487c1c-ba57-440c-a854-1c0238e51bfb-kube-api-access-xz4dg\") on node \"crc\" DevicePath \"\""
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.670625 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js6v6\" (UniqueName: \"kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6\") pod \"neutron-7690-account-create-hfkfp\" (UID: \"cef5d2f6-4ebf-4473-9e7e-87904a61af9b\") " pod="openstack/neutron-7690-account-create-hfkfp"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.676587 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0f43-account-create-cl6t4"
Sep 30 20:05:26 crc kubenswrapper[4603]: I0930 20:05:26.838475 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7690-account-create-hfkfp"
Sep 30 20:05:27 crc kubenswrapper[4603]: I0930 20:05:27.114340 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0f43-account-create-cl6t4"]
Sep 30 20:05:27 crc kubenswrapper[4603]: I0930 20:05:27.144948 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0f43-account-create-cl6t4" event={"ID":"2c26e760-1c76-4f97-9df8-101bdf01d8ec","Type":"ContainerStarted","Data":"96912fed5bcc1e42b01f5d057f5cded004fd10abcab89661e41bdd98722f7a40"}
Sep 30 20:05:27 crc kubenswrapper[4603]: I0930 20:05:27.146921 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-df76-account-create-9qz69" event={"ID":"03487c1c-ba57-440c-a854-1c0238e51bfb","Type":"ContainerDied","Data":"767672dbdd1f5fdb8f363557acb3b710012c9ac7dbca1100e8a7075d5dfb253a"}
Sep 30 20:05:27 crc kubenswrapper[4603]: I0930 20:05:27.146949 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="767672dbdd1f5fdb8f363557acb3b710012c9ac7dbca1100e8a7075d5dfb253a"
Need to start a new one" pod="openstack/barbican-df76-account-create-9qz69" Sep 30 20:05:27 crc kubenswrapper[4603]: I0930 20:05:27.280613 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7690-account-create-hfkfp"] Sep 30 20:05:27 crc kubenswrapper[4603]: W0930 20:05:27.281763 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcef5d2f6_4ebf_4473_9e7e_87904a61af9b.slice/crio-6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb WatchSource:0}: Error finding container 6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb: Status 404 returned error can't find the container with id 6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb Sep 30 20:05:28 crc kubenswrapper[4603]: I0930 20:05:28.157789 4603 generic.go:334] "Generic (PLEG): container finished" podID="2c26e760-1c76-4f97-9df8-101bdf01d8ec" containerID="650b3a5f2c5d23a260cc4071fbb5e31f930a45034cf67466a19f1cb82e9ab21c" exitCode=0 Sep 30 20:05:28 crc kubenswrapper[4603]: I0930 20:05:28.157836 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0f43-account-create-cl6t4" event={"ID":"2c26e760-1c76-4f97-9df8-101bdf01d8ec","Type":"ContainerDied","Data":"650b3a5f2c5d23a260cc4071fbb5e31f930a45034cf67466a19f1cb82e9ab21c"} Sep 30 20:05:28 crc kubenswrapper[4603]: I0930 20:05:28.161150 4603 generic.go:334] "Generic (PLEG): container finished" podID="cef5d2f6-4ebf-4473-9e7e-87904a61af9b" containerID="55a91bbb098b668967de61f6bed823ab586b2624a7ff7310a788f38266e65468" exitCode=0 Sep 30 20:05:28 crc kubenswrapper[4603]: I0930 20:05:28.161214 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7690-account-create-hfkfp" event={"ID":"cef5d2f6-4ebf-4473-9e7e-87904a61af9b","Type":"ContainerDied","Data":"55a91bbb098b668967de61f6bed823ab586b2624a7ff7310a788f38266e65468"} Sep 30 20:05:28 crc kubenswrapper[4603]: I0930 20:05:28.161249 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7690-account-create-hfkfp" event={"ID":"cef5d2f6-4ebf-4473-9e7e-87904a61af9b","Type":"ContainerStarted","Data":"6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb"} Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.510814 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0f43-account-create-cl6t4" Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.531562 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7690-account-create-hfkfp" Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.611401 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqnkg\" (UniqueName: \"kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg\") pod \"2c26e760-1c76-4f97-9df8-101bdf01d8ec\" (UID: \"2c26e760-1c76-4f97-9df8-101bdf01d8ec\") " Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.611737 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js6v6\" (UniqueName: \"kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6\") pod \"cef5d2f6-4ebf-4473-9e7e-87904a61af9b\" (UID: \"cef5d2f6-4ebf-4473-9e7e-87904a61af9b\") " Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.616860 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg" (OuterVolumeSpecName: "kube-api-access-pqnkg") pod "2c26e760-1c76-4f97-9df8-101bdf01d8ec" (UID: "2c26e760-1c76-4f97-9df8-101bdf01d8ec"). InnerVolumeSpecName "kube-api-access-pqnkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.617342 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6" (OuterVolumeSpecName: "kube-api-access-js6v6") pod "cef5d2f6-4ebf-4473-9e7e-87904a61af9b" (UID: "cef5d2f6-4ebf-4473-9e7e-87904a61af9b"). InnerVolumeSpecName "kube-api-access-js6v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.713633 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqnkg\" (UniqueName: \"kubernetes.io/projected/2c26e760-1c76-4f97-9df8-101bdf01d8ec-kube-api-access-pqnkg\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:29 crc kubenswrapper[4603]: I0930 20:05:29.713927 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js6v6\" (UniqueName: \"kubernetes.io/projected/cef5d2f6-4ebf-4473-9e7e-87904a61af9b-kube-api-access-js6v6\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.182797 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7690-account-create-hfkfp" event={"ID":"cef5d2f6-4ebf-4473-9e7e-87904a61af9b","Type":"ContainerDied","Data":"6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb"} Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.182844 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7690-account-create-hfkfp" Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.182848 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ac5547b54370f7841587c19914456835b3dfec13eda9ca6130b05f57d29e1eb" Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.184931 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0f43-account-create-cl6t4" event={"ID":"2c26e760-1c76-4f97-9df8-101bdf01d8ec","Type":"ContainerDied","Data":"96912fed5bcc1e42b01f5d057f5cded004fd10abcab89661e41bdd98722f7a40"} Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.184958 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96912fed5bcc1e42b01f5d057f5cded004fd10abcab89661e41bdd98722f7a40" Sep 30 20:05:30 crc kubenswrapper[4603]: I0930 20:05:30.185009 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0f43-account-create-cl6t4" Sep 30 20:05:34 crc kubenswrapper[4603]: I0930 20:05:34.220997 4603 generic.go:334] "Generic (PLEG): container finished" podID="b221670d-0e9e-427c-845c-7c00c4566e64" containerID="f7081fc457ee52b17531bbe0dfc3ae102d4c46a12f943a21c15e29f118e0ab72" exitCode=0 Sep 30 20:05:34 crc kubenswrapper[4603]: I0930 20:05:34.221122 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7rprt" event={"ID":"b221670d-0e9e-427c-845c-7c00c4566e64","Type":"ContainerDied","Data":"f7081fc457ee52b17531bbe0dfc3ae102d4c46a12f943a21c15e29f118e0ab72"} Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.524155 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.621184 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data\") pod \"b221670d-0e9e-427c-845c-7c00c4566e64\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.621651 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle\") pod \"b221670d-0e9e-427c-845c-7c00c4566e64\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.621826 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb8gk\" (UniqueName: \"kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk\") pod \"b221670d-0e9e-427c-845c-7c00c4566e64\" (UID: \"b221670d-0e9e-427c-845c-7c00c4566e64\") " Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.655279 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk" (OuterVolumeSpecName: "kube-api-access-xb8gk") pod "b221670d-0e9e-427c-845c-7c00c4566e64" (UID: "b221670d-0e9e-427c-845c-7c00c4566e64"). InnerVolumeSpecName "kube-api-access-xb8gk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.699410 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b221670d-0e9e-427c-845c-7c00c4566e64" (UID: "b221670d-0e9e-427c-845c-7c00c4566e64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.727387 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb8gk\" (UniqueName: \"kubernetes.io/projected/b221670d-0e9e-427c-845c-7c00c4566e64-kube-api-access-xb8gk\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.727427 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.752330 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data" (OuterVolumeSpecName: "config-data") pod "b221670d-0e9e-427c-845c-7c00c4566e64" (UID: "b221670d-0e9e-427c-845c-7c00c4566e64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:05:35 crc kubenswrapper[4603]: I0930 20:05:35.829082 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b221670d-0e9e-427c-845c-7c00c4566e64-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.238920 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7rprt" event={"ID":"b221670d-0e9e-427c-845c-7c00c4566e64","Type":"ContainerDied","Data":"3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783"} Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.238967 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3622ad0ce413bce1d5f288eda2ef8c2afa9a00ca07b2e59283b94a4d6c967783" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.239047 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7rprt" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.571240 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-t6zkm"] Sep 30 20:05:36 crc kubenswrapper[4603]: E0930 20:05:36.572101 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b221670d-0e9e-427c-845c-7c00c4566e64" containerName="keystone-db-sync" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572120 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b221670d-0e9e-427c-845c-7c00c4566e64" containerName="keystone-db-sync" Sep 30 20:05:36 crc kubenswrapper[4603]: E0930 20:05:36.572145 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c26e760-1c76-4f97-9df8-101bdf01d8ec" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572152 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c26e760-1c76-4f97-9df8-101bdf01d8ec" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: E0930 20:05:36.572184 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cef5d2f6-4ebf-4473-9e7e-87904a61af9b" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572191 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cef5d2f6-4ebf-4473-9e7e-87904a61af9b" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572513 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c26e760-1c76-4f97-9df8-101bdf01d8ec" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572541 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="b221670d-0e9e-427c-845c-7c00c4566e64" containerName="keystone-db-sync" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.572565 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="cef5d2f6-4ebf-4473-9e7e-87904a61af9b" containerName="mariadb-account-create" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.573359 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.576416 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.576653 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.576854 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hqmtj" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.577104 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.640529 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.641928 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646505 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646572 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52cvf\" (UniqueName: \"kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646607 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646631 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646931 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.646981 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.657061 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.678288 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-t6zkm"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750396 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750435 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc 
kubenswrapper[4603]: I0930 20:05:36.750470 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52cvf\" (UniqueName: \"kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750491 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp2h4\" (UniqueName: \"kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750514 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750535 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750591 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750617 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750641 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750662 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.750683 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc 
kubenswrapper[4603]: I0930 20:05:36.757660 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.759308 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.770077 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.770636 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.774535 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.776078 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.780967 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.818459 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.818832 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.818652 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.818692 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-59w9t" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.827336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52cvf\" (UniqueName: \"kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf\") pod \"keystone-bootstrap-t6zkm\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.829773 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857452 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
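The m=+1126.042436757 suffixes in the timestamps above are Go monotonic-clock readings included by time.Time's String method: seconds elapsed since the process's monotonic clock started. Subtracting the offset from the wall-clock part recovers that zero point, which is why the offsets grow in lockstep across every entry from this kubenswrapper process. A quick check:

```go
// Recover the instant the process's monotonic clock read zero from one
// logged "wall time m=+offset" pair (plain arithmetic; sketch only).
package main

import (
	"fmt"
	"time"
)

func main() {
	wall, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST",
		"2025-09-30 20:05:24.103977939 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// m=+1126.042436757 -> 1126 s + 42436757 ns
	offset := 1126*time.Second + 42436757*time.Nanosecond
	fmt.Println("monotonic zero at:", wall.Add(-offset))
}
```

Running the same subtraction on any two entries should land on the same instant (to clock-skew precision), a handy consistency check when stitching together logs from one process.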
\"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857523 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857561 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857585 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857624 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857714 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp2h4\" (UniqueName: \"kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857794 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857834 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj98w\" (UniqueName: \"kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857855 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.857876 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.858761 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.860302 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.861516 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.862471 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.903627 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.937586 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.939587 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.945515 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.945766 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.946531 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp2h4\" (UniqueName: \"kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4\") pod \"dnsmasq-dns-5c9d85d47c-fzjtf\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960278 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960329 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960359 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmfrl\" (UniqueName: \"kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960383 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960403 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj98w\" (UniqueName: \"kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960425 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960469 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960498 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960515 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960543 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960560 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.960579 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.964740 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.966463 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.966883 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.979200 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:36 crc kubenswrapper[4603]: I0930 20:05:36.992242 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.009244 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj98w\" (UniqueName: \"kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w\") pod \"horizon-6d99cfbf7-25x78\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.014289 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067046 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067113 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067149 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067200 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067219 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067280 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067305 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmfrl\" (UniqueName: \"kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.067651 4603 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/cinder-db-sync-cgrnb"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.068812 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.073621 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9nwz8" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.073835 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.074038 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.074225 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.075463 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.075574 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.084815 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.089038 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.089228 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.098463 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.101557 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmfrl\" (UniqueName: \"kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl\") pod \"ceilometer-0\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.162258 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cgrnb"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.175449 4603 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.176214 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5nmq\" (UniqueName: \"kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.176311 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.176402 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.176471 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.176593 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.195256 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.196651 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.212794 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.221653 4603 util.go:30] "No sandbox for pod can be found. 
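The SyncLoop ADD/UPDATE/DELETE lines (kubelet.go:2421/2428/2437) are the kubelet's main loop dispatching on pod-event type; note the DELETE for dnsmasq-dns-5c9d85d47c-fzjtf above, immediately followed by the ADD of its replacement dnsmasq-dns-6ffb94d8ff-dnxql, the signature of a Deployment rollout replacing one pod-template hash with another. The dispatch shape, as a toy rather than the real loop:

```go
// Toy SyncLoop dispatcher (sketch): one channel of typed pod events,
// a switch per event kind, mirroring the log lines above.
package main

import "fmt"

type podEvent struct {
	op   string // "ADD", "UPDATE", or "DELETE"
	pods []string
}

func syncLoop(events <-chan podEvent) {
	for ev := range events {
		switch ev.op {
		case "ADD":
			fmt.Printf("SyncLoop ADD source=%q pods=%v\n", "api", ev.pods)
		case "UPDATE":
			fmt.Printf("SyncLoop UPDATE source=%q pods=%v\n", "api", ev.pods)
		case "DELETE":
			fmt.Printf("SyncLoop DELETE source=%q pods=%v\n", "api", ev.pods)
		}
	}
}

func main() {
	ch := make(chan podEvent, 2)
	ch <- podEvent{"DELETE", []string{"openstack/dnsmasq-dns-5c9d85d47c-fzjtf"}}
	ch <- podEvent{"ADD", []string{"openstack/dnsmasq-dns-6ffb94d8ff-dnxql"}}
	close(ch)
	syncLoop(ch)
}
```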
Need to start a new one" pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.276494 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-2m748"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278077 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278127 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278188 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfrpj\" (UniqueName: \"kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278214 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278261 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278290 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278321 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278345 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5nmq\" (UniqueName: \"kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278376 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278403 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278419 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.278677 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.283864 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.284292 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-cl7gf" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.284408 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.284618 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.284724 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.292687 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.296474 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.298477 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.304808 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.305588 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.307623 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.317833 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5nmq\" (UniqueName: \"kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq\") pod \"cinder-db-sync-cgrnb\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.320277 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2m748"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.366692 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4265b"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.368023 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.371539 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.371962 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.372119 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dzdhv" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.379901 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.379938 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.379966 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.379998 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 
20:05:37.380019 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380040 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380061 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380094 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfrpj\" (UniqueName: \"kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380124 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380151 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380185 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4l5q\" (UniqueName: \"kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380205 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380231 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 
20:05:37.380249 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d44z7\" (UniqueName: \"kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380269 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380295 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh6p5\" (UniqueName: \"kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380315 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.380329 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.389002 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.398024 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.398102 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-t94b6"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.398408 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.398518 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " 
pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.399484 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.401682 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.402007 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ktchh" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.408828 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.417137 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfrpj\" (UniqueName: \"kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj\") pod \"dnsmasq-dns-6ffb94d8ff-dnxql\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.432903 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.436286 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t94b6"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.448662 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4265b"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488101 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4l5q\" (UniqueName: \"kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488143 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488187 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488208 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d44z7\" (UniqueName: \"kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488225 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 
20:05:37.488243 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh6p5\" (UniqueName: \"kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488260 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488273 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488308 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488334 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488352 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.488387 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.494206 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.498375 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs\") pod 
\"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.524321 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.524592 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.524883 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.525084 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.525721 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.531241 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4l5q\" (UniqueName: \"kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.532376 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.538897 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.559612 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh6p5\" (UniqueName: \"kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5\") pod \"neutron-db-sync-4265b\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.560526 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.561664 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key\") pod \"horizon-6f6d6f87cc-sq4cs\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.573277 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d44z7\" (UniqueName: \"kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7\") pod \"placement-db-sync-2m748\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") " pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.595111 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.596085 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2lbc\" (UniqueName: \"kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.596189 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.609089 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4265b" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.671925 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2m748" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.698256 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.699382 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2lbc\" (UniqueName: \"kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.699436 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.710606 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.712656 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.721021 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2lbc\" (UniqueName: \"kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc\") pod \"barbican-db-sync-t94b6\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.724682 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.809993 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.841134 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-t6zkm"] Sep 30 20:05:37 crc kubenswrapper[4603]: I0930 20:05:37.925647 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-t94b6" Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.000313 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.007097 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:05:38 crc kubenswrapper[4603]: W0930 20:05:38.030082 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f3879b3_8f74_461c_9863_38b454275d08.slice/crio-4b9e4d8d1ea40e2d464175b45e3ce681856418e410397fa4439ed9ab3088ad1c WatchSource:0}: Error finding container 4b9e4d8d1ea40e2d464175b45e3ce681856418e410397fa4439ed9ab3088ad1c: Status 404 returned error can't find the container with id 4b9e4d8d1ea40e2d464175b45e3ce681856418e410397fa4439ed9ab3088ad1c Sep 30 20:05:38 crc kubenswrapper[4603]: W0930 20:05:38.050863 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29b4231f_d9d3_4ac4_ba39_f6d9ea962724.slice/crio-987d3d250649eef71f6e42c98bb618a052b08e5140634f696ddfc5fc0a1e8d7e WatchSource:0}: Error finding container 987d3d250649eef71f6e42c98bb618a052b08e5140634f696ddfc5fc0a1e8d7e: Status 404 returned error can't find the container with id 987d3d250649eef71f6e42c98bb618a052b08e5140634f696ddfc5fc0a1e8d7e Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.366318 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" event={"ID":"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443","Type":"ContainerStarted","Data":"fdcbd429060aa78b8b83acf4e13043e3925738fa832441609dc7cb37b70000e8"} Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.380430 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d99cfbf7-25x78" event={"ID":"9f3879b3-8f74-461c-9863-38b454275d08","Type":"ContainerStarted","Data":"4b9e4d8d1ea40e2d464175b45e3ce681856418e410397fa4439ed9ab3088ad1c"} Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.385992 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6zkm" event={"ID":"9947ef36-b071-4df8-992a-6f7894bb6daf","Type":"ContainerStarted","Data":"78d787cba58cabc28321ba2d5726ccf2cc38042b9c38cd1186b58f29766fb1dd"} Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.389421 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerStarted","Data":"987d3d250649eef71f6e42c98bb618a052b08e5140634f696ddfc5fc0a1e8d7e"} Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.721835 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.904288 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.928657 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4265b"] Sep 30 20:05:38 crc kubenswrapper[4603]: I0930 20:05:38.992295 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-cgrnb"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.085528 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2m748"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.196001 4603 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t94b6"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.225728 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.245608 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.248912 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.269026 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.284545 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.365319 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.366130 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.366205 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.366357 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b74ms\" (UniqueName: \"kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.366534 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.404686 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t94b6" event={"ID":"49895f67-376c-4c08-9382-18aee2212e04","Type":"ContainerStarted","Data":"912773e710556bfb6a4dc40a2f23ca0047473754f8cd2105709655f98804eba7"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.410837 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2m748" event={"ID":"be703fd8-b3d7-4462-a905-5a835f8e2125","Type":"ContainerStarted","Data":"6eed94f661bb8a763b6b2de3823cad8fc413fb90ea7d5b38077ce76f55ec8cd9"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.413691 4603 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" event={"ID":"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d","Type":"ContainerStarted","Data":"343804cd66bf330c9a8e493d4d3746fc4866df49c68cb96b9c65f30bde608dd6"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.423863 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4265b" event={"ID":"a86c5953-cb61-4f11-b581-eb7698adf7ec","Type":"ContainerStarted","Data":"ea825d917bfeba0203b60bbc0f3daf96dfe75625255c235f05c9e5ec2f9e945e"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.458107 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"91303284ac15296228a9160a4dffee234ec33fc11728d928ec7481872de8bd7b"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.459922 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f6d6f87cc-sq4cs" event={"ID":"9f897b15-4e3c-4f3c-8164-a8b95dea1601","Type":"ContainerStarted","Data":"72cb7730ce2e050c68ce3e65e454c420510d189ae396878f1ca1d57e9db9c92b"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.464348 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cgrnb" event={"ID":"7a87079f-7cb1-447c-a950-bb204031afce","Type":"ContainerStarted","Data":"71f33a214dab7ddbb7391efe839906d8d4c7c572e72802f2a35b2f11034608c4"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.468041 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b74ms\" (UniqueName: \"kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.468202 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.468310 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.468348 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.468370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.471831 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.473214 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.478938 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.490703 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.493537 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6zkm" event={"ID":"9947ef36-b071-4df8-992a-6f7894bb6daf","Type":"ContainerStarted","Data":"e52ed40163cd1aff1eb59426b1bc695c43b7b8c073ce298029696eb02cbac8d7"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.496199 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b74ms\" (UniqueName: \"kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms\") pod \"horizon-787895f79-ldrlh\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.503660 4603 generic.go:334] "Generic (PLEG): container finished" podID="c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" containerID="6b30d0334409e1969cc552eb2d7f39e3d577913d27cceb52ed81f1478862e525" exitCode=0 Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.503704 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" event={"ID":"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443","Type":"ContainerDied","Data":"6b30d0334409e1969cc552eb2d7f39e3d577913d27cceb52ed81f1478862e525"} Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.518942 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-t6zkm" podStartSLOduration=3.518928384 podStartE2EDuration="3.518928384s" podCreationTimestamp="2025-09-30 20:05:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:05:39.51278069 +0000 UTC m=+1141.451239528" watchObservedRunningTime="2025-09-30 20:05:39.518928384 +0000 UTC m=+1141.457387202" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.655652 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.901745 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.983052 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config\") pod \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.984057 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb\") pod \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.984113 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qp2h4\" (UniqueName: \"kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4\") pod \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.984427 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc\") pod \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.984548 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb\") pod \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\" (UID: \"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443\") " Sep 30 20:05:39 crc kubenswrapper[4603]: I0930 20:05:39.996516 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4" (OuterVolumeSpecName: "kube-api-access-qp2h4") pod "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" (UID: "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443"). InnerVolumeSpecName "kube-api-access-qp2h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.008884 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config" (OuterVolumeSpecName: "config") pod "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" (UID: "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.015343 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" (UID: "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.032545 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" (UID: "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.043944 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" (UID: "c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.087787 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.087982 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qp2h4\" (UniqueName: \"kubernetes.io/projected/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-kube-api-access-qp2h4\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.088039 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.088109 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.088173 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.215738 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.542522 4603 generic.go:334] "Generic (PLEG): container finished" podID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerID="114ed701f26ade4954dfa4ed42409df5b2f8e0817855c578a4c60189304b0a1a" exitCode=0 Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.542577 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" event={"ID":"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d","Type":"ContainerDied","Data":"114ed701f26ade4954dfa4ed42409df5b2f8e0817855c578a4c60189304b0a1a"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.549217 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4265b" event={"ID":"a86c5953-cb61-4f11-b581-eb7698adf7ec","Type":"ContainerStarted","Data":"202e659da32a9e682700e9f14341f18f494de34a1c84c1b04b4984e931ad9ca9"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.611739 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4265b" podStartSLOduration=3.611724947 podStartE2EDuration="3.611724947s" podCreationTimestamp="2025-09-30 20:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:05:40.598714951 +0000 UTC m=+1142.537173769" watchObservedRunningTime="2025-09-30 20:05:40.611724947 +0000 UTC m=+1142.550183765" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.661764 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"078b59e720ddb9c3a73031ef06ed86861caedf1968ec0f757742778278e7250d"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.661813 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"6e0702149aa990c8329358a567859570dabed3e07a10b39ac74d4ddeb9a48839"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.692650 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-787895f79-ldrlh" event={"ID":"12e73686-8083-4128-afd6-84cd7fa8843f","Type":"ContainerStarted","Data":"f0b6d609a7dbdf40bf49dcc9c1d97f919c4608f73861a92a5d3bfbf31d0faeec"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.715716 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.715795 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-fzjtf" event={"ID":"c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443","Type":"ContainerDied","Data":"fdcbd429060aa78b8b83acf4e13043e3925738fa832441609dc7cb37b70000e8"} Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.715832 4603 scope.go:117] "RemoveContainer" containerID="6b30d0334409e1969cc552eb2d7f39e3d577913d27cceb52ed81f1478862e525" Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.873570 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:40 crc kubenswrapper[4603]: I0930 20:05:40.899394 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-fzjtf"] Sep 30 20:05:41 crc kubenswrapper[4603]: I0930 20:05:41.725147 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" event={"ID":"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d","Type":"ContainerStarted","Data":"fa544ff80f03c62af9c3a06942e9196e145b4df440469850b4f3aa52d0b69604"} Sep 30 20:05:41 crc kubenswrapper[4603]: I0930 20:05:41.726288 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:41 crc kubenswrapper[4603]: I0930 20:05:41.746311 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" podStartSLOduration=4.74627813 podStartE2EDuration="4.74627813s" podCreationTimestamp="2025-09-30 20:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:05:41.745019766 +0000 UTC m=+1143.683478574" watchObservedRunningTime="2025-09-30 20:05:41.74627813 +0000 UTC m=+1143.684736948" Sep 30 20:05:41 crc kubenswrapper[4603]: I0930 20:05:41.751408 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"b23df3bb9038c8988c37c7880c610cb4d9225dd36467ef6ddbd8f15332524cc2"} Sep 30 20:05:42 crc kubenswrapper[4603]: I0930 20:05:42.799157 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" path="/var/lib/kubelet/pods/c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443/volumes" Sep 30 20:05:42 crc kubenswrapper[4603]: I0930 20:05:42.800548 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"bd13acf3c00a1b821d23e0e1da41d29e25207c4b5f983c1a519c1ce1c766414d"} Sep 30 20:05:42 crc kubenswrapper[4603]: I0930 20:05:42.800602 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c71b4eca-ba52-40ee-88e6-f0b50794825d","Type":"ContainerStarted","Data":"d70c9e76b949e81b7168d1e407aabefa3b59843430cc960b2c3729724e101ab3"} Sep 30 20:05:43 crc kubenswrapper[4603]: I0930 20:05:43.822833 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=40.574672905 podStartE2EDuration="1m13.82281587s" podCreationTimestamp="2025-09-30 20:04:30 +0000 UTC" firstStartedPulling="2025-09-30 20:05:04.88861471 +0000 UTC m=+1106.827073538" lastFinishedPulling="2025-09-30 20:05:38.136757685 +0000 UTC m=+1140.075216503" observedRunningTime="2025-09-30 20:05:43.817521719 +0000 UTC m=+1145.755980557" watchObservedRunningTime="2025-09-30 20:05:43.82281587 +0000 UTC m=+1145.761274688" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.084229 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.084523 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="dnsmasq-dns" containerID="cri-o://fa544ff80f03c62af9c3a06942e9196e145b4df440469850b4f3aa52d0b69604" gracePeriod=10 Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.130332 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:05:44 crc kubenswrapper[4603]: E0930 20:05:44.130702 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" containerName="init" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.130713 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" containerName="init" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.130866 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36e2fcb-f39c-4bbc-b0e3-96ede2e2e443" containerName="init" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.131715 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.137770 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.161892 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209204 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn2rl\" (UniqueName: \"kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209250 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209284 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209305 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209327 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.209362 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310748 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn2rl\" (UniqueName: \"kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310801 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") 
" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310842 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310866 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310892 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.310938 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.311907 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.312923 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.313564 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.315605 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.316185 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.335639 4603 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn2rl\" (UniqueName: \"kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl\") pod \"dnsmasq-dns-fcfdd6f9f-sfsw5\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.455929 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.799490 4603 generic.go:334] "Generic (PLEG): container finished" podID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerID="fa544ff80f03c62af9c3a06942e9196e145b4df440469850b4f3aa52d0b69604" exitCode=0 Sep 30 20:05:44 crc kubenswrapper[4603]: I0930 20:05:44.799568 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" event={"ID":"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d","Type":"ContainerDied","Data":"fa544ff80f03c62af9c3a06942e9196e145b4df440469850b4f3aa52d0b69604"} Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.746342 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.809846 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.811798 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.814493 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.836834 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.873120 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.920798 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7cc565dc7d-zt9pz"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.922568 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.930636 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cc565dc7d-zt9pz"] Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.940801 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.940876 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.940971 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.941000 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.941066 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.941087 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:45 crc kubenswrapper[4603]: I0930 20:05:45.941138 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s72jt\" (UniqueName: \"kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045158 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-config-data\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045210 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-combined-ca-bundle\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045248 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-secret-key\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045268 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045287 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045308 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-scripts\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045331 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-tls-certs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045379 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045397 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045417 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53799743-167b-4a74-9cab-3e591a04391b-logs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045450 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s72jt\" (UniqueName: 
\"kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045474 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045510 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.045532 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd57z\" (UniqueName: \"kubernetes.io/projected/53799743-167b-4a74-9cab-3e591a04391b-kube-api-access-zd57z\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.046706 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.047459 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.047921 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.053880 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.056728 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.057216 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " 
pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.062481 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s72jt\" (UniqueName: \"kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt\") pod \"horizon-8575cd6744-wt57f\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152472 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-config-data\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152524 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-combined-ca-bundle\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152630 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-secret-key\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152691 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-scripts\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152733 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-tls-certs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.152895 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53799743-167b-4a74-9cab-3e591a04391b-logs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.153644 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-config-data\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.153754 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53799743-167b-4a74-9cab-3e591a04391b-scripts\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.154493 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-zd57z\" (UniqueName: \"kubernetes.io/projected/53799743-167b-4a74-9cab-3e591a04391b-kube-api-access-zd57z\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.154549 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53799743-167b-4a74-9cab-3e591a04391b-logs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.157941 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-combined-ca-bundle\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.158478 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-secret-key\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.172491 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd57z\" (UniqueName: \"kubernetes.io/projected/53799743-167b-4a74-9cab-3e591a04391b-kube-api-access-zd57z\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.175786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/53799743-167b-4a74-9cab-3e591a04391b-horizon-tls-certs\") pod \"horizon-7cc565dc7d-zt9pz\" (UID: \"53799743-167b-4a74-9cab-3e591a04391b\") " pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.245271 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.270826 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.422229 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.737067 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:05:46 crc kubenswrapper[4603]: W0930 20:05:46.744494 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode53c6d5a_9a76_4d2e_b821_68c74620f22b.slice/crio-c5e5db1a377e4b952c66a2cb0b717fb855832f51a8d3736e9e51bab750b790f7 WatchSource:0}: Error finding container c5e5db1a377e4b952c66a2cb0b717fb855832f51a8d3736e9e51bab750b790f7: Status 404 returned error can't find the container with id c5e5db1a377e4b952c66a2cb0b717fb855832f51a8d3736e9e51bab750b790f7 Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.758146 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cc565dc7d-zt9pz"] Sep 30 20:05:46 crc kubenswrapper[4603]: W0930 20:05:46.767748 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53799743_167b_4a74_9cab_3e591a04391b.slice/crio-283a6917b2b45df61f3b4278a9786e1c8b2218acac52d060309d15dad589a525 WatchSource:0}: Error finding container 283a6917b2b45df61f3b4278a9786e1c8b2218acac52d060309d15dad589a525: Status 404 returned error can't find the container with id 283a6917b2b45df61f3b4278a9786e1c8b2218acac52d060309d15dad589a525 Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.817025 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerStarted","Data":"c5e5db1a377e4b952c66a2cb0b717fb855832f51a8d3736e9e51bab750b790f7"} Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.818025 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" event={"ID":"96a738e3-111f-46ca-846b-b9f28284e84c","Type":"ContainerStarted","Data":"1482e19bbac6c1925acd4a9cc3ceb7a27810f95baa3f4dfa223acdc2b4e4a032"} Sep 30 20:05:46 crc kubenswrapper[4603]: I0930 20:05:46.819424 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerStarted","Data":"283a6917b2b45df61f3b4278a9786e1c8b2218acac52d060309d15dad589a525"} Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.479232 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.591153 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config\") pod \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.591245 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb\") pod \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.591275 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb\") pod \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.591302 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfrpj\" (UniqueName: \"kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj\") pod \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.591358 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc\") pod \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\" (UID: \"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d\") " Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.627362 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj" (OuterVolumeSpecName: "kube-api-access-nfrpj") pod "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" (UID: "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d"). InnerVolumeSpecName "kube-api-access-nfrpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.671304 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" (UID: "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.689325 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config" (OuterVolumeSpecName: "config") pod "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" (UID: "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.694766 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfrpj\" (UniqueName: \"kubernetes.io/projected/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-kube-api-access-nfrpj\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.694793 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.694806 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.707286 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" (UID: "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.758689 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" (UID: "0d010ab6-46a2-4e6a-b4ab-a757cdcec29d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.796289 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.796317 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.834556 4603 generic.go:334] "Generic (PLEG): container finished" podID="96a738e3-111f-46ca-846b-b9f28284e84c" containerID="316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5" exitCode=0 Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.834628 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" event={"ID":"96a738e3-111f-46ca-846b-b9f28284e84c","Type":"ContainerDied","Data":"316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5"} Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.836900 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" event={"ID":"0d010ab6-46a2-4e6a-b4ab-a757cdcec29d","Type":"ContainerDied","Data":"343804cd66bf330c9a8e493d4d3746fc4866df49c68cb96b9c65f30bde608dd6"} Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.836966 4603 scope.go:117] "RemoveContainer" containerID="fa544ff80f03c62af9c3a06942e9196e145b4df440469850b4f3aa52d0b69604" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.837194 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-dnxql" Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.896448 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:47 crc kubenswrapper[4603]: I0930 20:05:47.905189 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-dnxql"] Sep 30 20:05:48 crc kubenswrapper[4603]: I0930 20:05:48.776733 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" path="/var/lib/kubelet/pods/0d010ab6-46a2-4e6a-b4ab-a757cdcec29d/volumes" Sep 30 20:05:51 crc kubenswrapper[4603]: I0930 20:05:51.885004 4603 generic.go:334] "Generic (PLEG): container finished" podID="9947ef36-b071-4df8-992a-6f7894bb6daf" containerID="e52ed40163cd1aff1eb59426b1bc695c43b7b8c073ce298029696eb02cbac8d7" exitCode=0 Sep 30 20:05:51 crc kubenswrapper[4603]: I0930 20:05:51.885096 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6zkm" event={"ID":"9947ef36-b071-4df8-992a-6f7894bb6daf","Type":"ContainerDied","Data":"e52ed40163cd1aff1eb59426b1bc695c43b7b8c073ce298029696eb02cbac8d7"} Sep 30 20:05:58 crc kubenswrapper[4603]: E0930 20:05:58.116080 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 20:05:58 crc kubenswrapper[4603]: E0930 20:05:58.117015 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5ddh58bhb9hddh56h547h5ffh5f6h65fhffhd9hfdh86h688h64h647h65h84h579h5cbh5dchb8h97h59h55fhc4h58bh556h544h556h584h67q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b74ms,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
horizon-787895f79-ldrlh_openstack(12e73686-8083-4128-afd6-84cd7fa8843f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:05:59 crc kubenswrapper[4603]: E0930 20:05:59.148341 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-787895f79-ldrlh" podUID="12e73686-8083-4128-afd6-84cd7fa8843f" Sep 30 20:06:04 crc kubenswrapper[4603]: E0930 20:06:04.113801 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 30 20:06:04 crc kubenswrapper[4603]: E0930 20:06:04.114325 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cdh58fh88h94h5bh694h5cfh9ch649h648h56h5bch68bhffh558h5fbh5d6h7fh695h575h66dh5c4h67dh649h8h654hfbhcbh6ch56fhb9h674q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lmfrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ceilometer-0_openstack(29b4231f-d9d3-4ac4-ba39-f6d9ea962724): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.599609 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.600240 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n54ch68fh89h656h58dh88h677h95h648h667h5f5hb4h659h665h9fh587h87h5c6h65fh8bh5cbh59hfdh97h5dbh85h559h9h586h58bh696h699q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fj98w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6d99cfbf7-25x78_openstack(9f3879b3-8f74-461c-9863-38b454275d08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.603824 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6d99cfbf7-25x78" podUID="9f3879b3-8f74-461c-9863-38b454275d08" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.685729 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.685931 4603 kuberuntime_manager.go:1274] "Unhandled 
Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n6dh66bh686h5h5b9h657h688h684h59h5ddh699h5c5h67fh65dhfch549h64ch649h8h58fh589h6h655h599h5c9h87h88h586h5ch56dh5d9h699q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p4l5q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6f6d6f87cc-sq4cs_openstack(9f897b15-4e3c-4f3c-8164-a8b95dea1601): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:05 crc kubenswrapper[4603]: E0930 20:06:05.688774 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6f6d6f87cc-sq4cs" podUID="9f897b15-4e3c-4f3c-8164-a8b95dea1601" Sep 30 20:06:10 crc kubenswrapper[4603]: I0930 20:06:10.081740 4603 generic.go:334] "Generic (PLEG): container finished" podID="4255d3b6-48b0-4a39-8991-bd70191f02ee" containerID="fff0e3ddc8c18a9925865d4dfd15879c17f595cdf49f1421d2851e4e0734e809" exitCode=0 Sep 30 20:06:10 crc kubenswrapper[4603]: I0930 20:06:10.081846 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-gb2nj" event={"ID":"4255d3b6-48b0-4a39-8991-bd70191f02ee","Type":"ContainerDied","Data":"fff0e3ddc8c18a9925865d4dfd15879c17f595cdf49f1421d2851e4e0734e809"} Sep 30 20:06:17 crc kubenswrapper[4603]: E0930 20:06:17.476880 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 20:06:17 crc kubenswrapper[4603]: E0930 20:06:17.477474 4603 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n94h5bdh97h679h685h5b5h67dh664h649h67dh84h58dh5f6h546hf6h5c5h668hc5h688hb7h5c8h58ch695h74h686h6bh555h558h664h594h5d5hdbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zd57z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7cc565dc7d-zt9pz_openstack(53799743-167b-4a74-9cab-3e591a04391b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:17 crc kubenswrapper[4603]: E0930 20:06:17.484695 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 30 20:06:17 crc kubenswrapper[4603]: E0930 20:06:17.484897 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h558h655h645h644h65fh66dh597h556h546h66bh646hbdh549h84h5bbh7dhcdh674h8ch78h577h557h76hb8h676h669h6h5bhb6h59h5dbq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s72jt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-8575cd6744-wt57f_openstack(e53c6d5a-9a76-4d2e-b821-68c74620f22b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.601569 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.606452 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781200 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781281 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781340 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs\") pod \"12e73686-8083-4128-afd6-84cd7fa8843f\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781409 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts\") pod \"12e73686-8083-4128-afd6-84cd7fa8843f\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781528 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key\") pod \"12e73686-8083-4128-afd6-84cd7fa8843f\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781574 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52cvf\" (UniqueName: \"kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781616 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781682 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data\") pod \"12e73686-8083-4128-afd6-84cd7fa8843f\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781702 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs" (OuterVolumeSpecName: "logs") pod "12e73686-8083-4128-afd6-84cd7fa8843f" (UID: "12e73686-8083-4128-afd6-84cd7fa8843f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781752 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b74ms\" (UniqueName: \"kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms\") pod \"12e73686-8083-4128-afd6-84cd7fa8843f\" (UID: \"12e73686-8083-4128-afd6-84cd7fa8843f\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781795 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781840 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data\") pod \"9947ef36-b071-4df8-992a-6f7894bb6daf\" (UID: \"9947ef36-b071-4df8-992a-6f7894bb6daf\") " Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.781901 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts" (OuterVolumeSpecName: "scripts") pod "12e73686-8083-4128-afd6-84cd7fa8843f" (UID: "12e73686-8083-4128-afd6-84cd7fa8843f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.782388 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12e73686-8083-4128-afd6-84cd7fa8843f-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.782411 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.782862 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data" (OuterVolumeSpecName: "config-data") pod "12e73686-8083-4128-afd6-84cd7fa8843f" (UID: "12e73686-8083-4128-afd6-84cd7fa8843f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.787674 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "12e73686-8083-4128-afd6-84cd7fa8843f" (UID: "12e73686-8083-4128-afd6-84cd7fa8843f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.787711 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf" (OuterVolumeSpecName: "kube-api-access-52cvf") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "kube-api-access-52cvf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.787987 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.788600 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts" (OuterVolumeSpecName: "scripts") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.788792 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.790007 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms" (OuterVolumeSpecName: "kube-api-access-b74ms") pod "12e73686-8083-4128-afd6-84cd7fa8843f" (UID: "12e73686-8083-4128-afd6-84cd7fa8843f"). InnerVolumeSpecName "kube-api-access-b74ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.813563 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.814629 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data" (OuterVolumeSpecName: "config-data") pod "9947ef36-b071-4df8-992a-6f7894bb6daf" (UID: "9947ef36-b071-4df8-992a-6f7894bb6daf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885488 4603 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885538 4603 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885562 4603 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12e73686-8083-4128-afd6-84cd7fa8843f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885583 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52cvf\" (UniqueName: \"kubernetes.io/projected/9947ef36-b071-4df8-992a-6f7894bb6daf-kube-api-access-52cvf\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885601 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885618 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12e73686-8083-4128-afd6-84cd7fa8843f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885634 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b74ms\" (UniqueName: \"kubernetes.io/projected/12e73686-8083-4128-afd6-84cd7fa8843f-kube-api-access-b74ms\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.885653 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:17 crc kubenswrapper[4603]: I0930 20:06:17.886544 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9947ef36-b071-4df8-992a-6f7894bb6daf-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.180642 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-787895f79-ldrlh" Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.180649 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-787895f79-ldrlh" event={"ID":"12e73686-8083-4128-afd6-84cd7fa8843f","Type":"ContainerDied","Data":"f0b6d609a7dbdf40bf49dcc9c1d97f919c4608f73861a92a5d3bfbf31d0faeec"} Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.183384 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6zkm" event={"ID":"9947ef36-b071-4df8-992a-6f7894bb6daf","Type":"ContainerDied","Data":"78d787cba58cabc28321ba2d5726ccf2cc38042b9c38cd1186b58f29766fb1dd"} Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.183443 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78d787cba58cabc28321ba2d5726ccf2cc38042b9c38cd1186b58f29766fb1dd" Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.183530 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6zkm" Sep 30 20:06:18 crc kubenswrapper[4603]: E0930 20:06:18.214690 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.244058 4603 scope.go:117] "RemoveContainer" containerID="114ed701f26ade4954dfa4ed42409df5b2f8e0817855c578a4c60189304b0a1a" Sep 30 20:06:18 crc kubenswrapper[4603]: E0930 20:06:18.244434 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.294880 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.304193 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-787895f79-ldrlh"] Sep 30 20:06:18 crc kubenswrapper[4603]: I0930 20:06:18.713657 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-t6zkm"] Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.721603 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-t6zkm"] Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.779867 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12e73686-8083-4128-afd6-84cd7fa8843f" path="/var/lib/kubelet/pods/12e73686-8083-4128-afd6-84cd7fa8843f/volumes" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.780450 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9947ef36-b071-4df8-992a-6f7894bb6daf" path="/var/lib/kubelet/pods/9947ef36-b071-4df8-992a-6f7894bb6daf/volumes" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.814515 4603 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/keystone-bootstrap-9k8jw"] Sep 30 20:06:23 crc kubenswrapper[4603]: E0930 20:06:18.814977 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="dnsmasq-dns" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.814995 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="dnsmasq-dns" Sep 30 20:06:23 crc kubenswrapper[4603]: E0930 20:06:18.815020 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="init" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.815029 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="init" Sep 30 20:06:23 crc kubenswrapper[4603]: E0930 20:06:18.815051 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9947ef36-b071-4df8-992a-6f7894bb6daf" containerName="keystone-bootstrap" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.815061 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9947ef36-b071-4df8-992a-6f7894bb6daf" containerName="keystone-bootstrap" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.815316 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d010ab6-46a2-4e6a-b4ab-a757cdcec29d" containerName="dnsmasq-dns" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.815338 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9947ef36-b071-4df8-992a-6f7894bb6daf" containerName="keystone-bootstrap" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.815999 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.826550 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9k8jw"] Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.831716 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.832058 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.832125 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.832444 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hqmtj" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908323 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908362 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908409 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908435 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908471 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:18.908503 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nvc7\" (UniqueName: \"kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009701 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009749 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009805 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009833 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009869 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.009900 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nvc7\" (UniqueName: 
\"kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.015568 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.016045 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.016399 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.019902 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.020954 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.047087 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nvc7\" (UniqueName: \"kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7\") pod \"keystone-bootstrap-9k8jw\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") " pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: I0930 20:06:19.142913 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9k8jw" Sep 30 20:06:23 crc kubenswrapper[4603]: E0930 20:06:19.196071 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" Sep 30 20:06:23 crc kubenswrapper[4603]: E0930 20:06:19.196132 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" Sep 30 20:06:24 crc kubenswrapper[4603]: I0930 20:06:24.955133 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:06:24 crc kubenswrapper[4603]: I0930 20:06:24.966151 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:06:24 crc kubenswrapper[4603]: I0930 20:06:24.973498 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-gb2nj" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119365 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key\") pod \"9f3879b3-8f74-461c-9863-38b454275d08\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119441 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4l5q\" (UniqueName: \"kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q\") pod \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119543 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dzsm\" (UniqueName: \"kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm\") pod \"4255d3b6-48b0-4a39-8991-bd70191f02ee\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119578 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data\") pod \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119608 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs\") pod \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " Sep 30 20:06:25 crc 
kubenswrapper[4603]: I0930 20:06:25.119650 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data\") pod \"9f3879b3-8f74-461c-9863-38b454275d08\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119721 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data\") pod \"4255d3b6-48b0-4a39-8991-bd70191f02ee\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119771 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts\") pod \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119810 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle\") pod \"4255d3b6-48b0-4a39-8991-bd70191f02ee\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119871 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts\") pod \"9f3879b3-8f74-461c-9863-38b454275d08\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119906 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj98w\" (UniqueName: \"kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w\") pod \"9f3879b3-8f74-461c-9863-38b454275d08\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119941 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs\") pod \"9f3879b3-8f74-461c-9863-38b454275d08\" (UID: \"9f3879b3-8f74-461c-9863-38b454275d08\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119963 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key\") pod \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\" (UID: \"9f897b15-4e3c-4f3c-8164-a8b95dea1601\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.119994 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data\") pod \"4255d3b6-48b0-4a39-8991-bd70191f02ee\" (UID: \"4255d3b6-48b0-4a39-8991-bd70191f02ee\") " Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.120362 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs" (OuterVolumeSpecName: "logs") pod "9f897b15-4e3c-4f3c-8164-a8b95dea1601" (UID: "9f897b15-4e3c-4f3c-8164-a8b95dea1601"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.120482 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f897b15-4e3c-4f3c-8164-a8b95dea1601-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.120979 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts" (OuterVolumeSpecName: "scripts") pod "9f3879b3-8f74-461c-9863-38b454275d08" (UID: "9f3879b3-8f74-461c-9863-38b454275d08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.120972 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data" (OuterVolumeSpecName: "config-data") pod "9f3879b3-8f74-461c-9863-38b454275d08" (UID: "9f3879b3-8f74-461c-9863-38b454275d08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.121213 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs" (OuterVolumeSpecName: "logs") pod "9f3879b3-8f74-461c-9863-38b454275d08" (UID: "9f3879b3-8f74-461c-9863-38b454275d08"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.121411 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data" (OuterVolumeSpecName: "config-data") pod "9f897b15-4e3c-4f3c-8164-a8b95dea1601" (UID: "9f897b15-4e3c-4f3c-8164-a8b95dea1601"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.121444 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts" (OuterVolumeSpecName: "scripts") pod "9f897b15-4e3c-4f3c-8164-a8b95dea1601" (UID: "9f897b15-4e3c-4f3c-8164-a8b95dea1601"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.124911 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9f3879b3-8f74-461c-9863-38b454275d08" (UID: "9f3879b3-8f74-461c-9863-38b454275d08"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.125447 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q" (OuterVolumeSpecName: "kube-api-access-p4l5q") pod "9f897b15-4e3c-4f3c-8164-a8b95dea1601" (UID: "9f897b15-4e3c-4f3c-8164-a8b95dea1601"). InnerVolumeSpecName "kube-api-access-p4l5q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.126593 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9f897b15-4e3c-4f3c-8164-a8b95dea1601" (UID: "9f897b15-4e3c-4f3c-8164-a8b95dea1601"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.126669 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4255d3b6-48b0-4a39-8991-bd70191f02ee" (UID: "4255d3b6-48b0-4a39-8991-bd70191f02ee"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.129380 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w" (OuterVolumeSpecName: "kube-api-access-fj98w") pod "9f3879b3-8f74-461c-9863-38b454275d08" (UID: "9f3879b3-8f74-461c-9863-38b454275d08"). InnerVolumeSpecName "kube-api-access-fj98w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.130031 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm" (OuterVolumeSpecName: "kube-api-access-8dzsm") pod "4255d3b6-48b0-4a39-8991-bd70191f02ee" (UID: "4255d3b6-48b0-4a39-8991-bd70191f02ee"). InnerVolumeSpecName "kube-api-access-8dzsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.145206 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4255d3b6-48b0-4a39-8991-bd70191f02ee" (UID: "4255d3b6-48b0-4a39-8991-bd70191f02ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.161540 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data" (OuterVolumeSpecName: "config-data") pod "4255d3b6-48b0-4a39-8991-bd70191f02ee" (UID: "4255d3b6-48b0-4a39-8991-bd70191f02ee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222329 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f3879b3-8f74-461c-9863-38b454275d08-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222356 4603 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f897b15-4e3c-4f3c-8164-a8b95dea1601-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222367 4603 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222377 4603 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f3879b3-8f74-461c-9863-38b454275d08-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222386 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4l5q\" (UniqueName: \"kubernetes.io/projected/9f897b15-4e3c-4f3c-8164-a8b95dea1601-kube-api-access-p4l5q\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222398 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dzsm\" (UniqueName: \"kubernetes.io/projected/4255d3b6-48b0-4a39-8991-bd70191f02ee-kube-api-access-8dzsm\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222406 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222416 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222425 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222432 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f897b15-4e3c-4f3c-8164-a8b95dea1601-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222440 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4255d3b6-48b0-4a39-8991-bd70191f02ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222449 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3879b3-8f74-461c-9863-38b454275d08-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.222457 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj98w\" (UniqueName: \"kubernetes.io/projected/9f3879b3-8f74-461c-9863-38b454275d08-kube-api-access-fj98w\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:25 crc 
kubenswrapper[4603]: I0930 20:06:25.247482 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-gb2nj" event={"ID":"4255d3b6-48b0-4a39-8991-bd70191f02ee","Type":"ContainerDied","Data":"15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5"} Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.247522 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15577088bec4966904d57eba36019cbbc2f7a072e2d304833b7e97a0200bebe5" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.247529 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-gb2nj" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.249662 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f6d6f87cc-sq4cs" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.249656 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f6d6f87cc-sq4cs" event={"ID":"9f897b15-4e3c-4f3c-8164-a8b95dea1601","Type":"ContainerDied","Data":"72cb7730ce2e050c68ce3e65e454c420510d189ae396878f1ca1d57e9db9c92b"} Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.251964 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d99cfbf7-25x78" event={"ID":"9f3879b3-8f74-461c-9863-38b454275d08","Type":"ContainerDied","Data":"4b9e4d8d1ea40e2d464175b45e3ce681856418e410397fa4439ed9ab3088ad1c"} Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.251990 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d99cfbf7-25x78" Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.322309 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.325475 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6f6d6f87cc-sq4cs"] Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.347764 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:06:25 crc kubenswrapper[4603]: I0930 20:06:25.355623 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d99cfbf7-25x78"] Sep 30 20:06:25 crc kubenswrapper[4603]: E0930 20:06:25.427992 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Sep 30 20:06:25 crc kubenswrapper[4603]: E0930 20:06:25.428185 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n2lbc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-t94b6_openstack(49895f67-376c-4c08-9382-18aee2212e04): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:25 crc kubenswrapper[4603]: E0930 20:06:25.429366 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-t94b6" podUID="49895f67-376c-4c08-9382-18aee2212e04" Sep 30 20:06:26 crc kubenswrapper[4603]: E0930 20:06:26.270866 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-t94b6" podUID="49895f67-376c-4c08-9382-18aee2212e04" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.482014 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.593142 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:06:26 crc kubenswrapper[4603]: E0930 20:06:26.593603 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4255d3b6-48b0-4a39-8991-bd70191f02ee" containerName="glance-db-sync" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.593620 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4255d3b6-48b0-4a39-8991-bd70191f02ee" containerName="glance-db-sync" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.593821 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4255d3b6-48b0-4a39-8991-bd70191f02ee" containerName="glance-db-sync" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.599203 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.617568 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.760770 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.761020 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.761092 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.761130 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7k5q\" (UniqueName: \"kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.761149 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.761229 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.776003 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f3879b3-8f74-461c-9863-38b454275d08" path="/var/lib/kubelet/pods/9f3879b3-8f74-461c-9863-38b454275d08/volumes" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.776598 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f897b15-4e3c-4f3c-8164-a8b95dea1601" path="/var/lib/kubelet/pods/9f897b15-4e3c-4f3c-8164-a8b95dea1601/volumes" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862711 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " 
pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862761 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862855 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862878 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862928 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.862959 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7k5q\" (UniqueName: \"kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.863655 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.863675 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.863967 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.864083 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 
20:06:26.864484 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.903283 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7k5q\" (UniqueName: \"kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q\") pod \"dnsmasq-dns-57c957c4ff-htxzg\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:26 crc kubenswrapper[4603]: E0930 20:06:26.944189 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 30 20:06:26 crc kubenswrapper[4603]: E0930 20:06:26.944345 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k5nmq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
cinder-db-sync-cgrnb_openstack(7a87079f-7cb1-447c-a950-bb204031afce): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:26 crc kubenswrapper[4603]: E0930 20:06:26.946687 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-cgrnb" podUID="7a87079f-7cb1-447c-a950-bb204031afce" Sep 30 20:06:26 crc kubenswrapper[4603]: I0930 20:06:26.965970 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:27 crc kubenswrapper[4603]: E0930 20:06:27.250485 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified" Sep 30 20:06:27 crc kubenswrapper[4603]: E0930 20:06:27.250653 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-notification-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cdh58fh88h94h5bh694h5cfh9ch649h648h56h5bch68bhffh558h5fbh5d6h7fh695h575h66dh5c4h67dh649h8h654hfbhcbh6ch56fhb9h674q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-notification-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lmfrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/notificationhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ceilometer-0_openstack(29b4231f-d9d3-4ac4-ba39-f6d9ea962724): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:06:27 crc kubenswrapper[4603]: E0930 20:06:27.285434 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-cgrnb" podUID="7a87079f-7cb1-447c-a950-bb204031afce" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.471785 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.484373 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.498725 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-fnjbp" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.501013 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.503065 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.506963 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.578807 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579084 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579145 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579212 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579230 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " 
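
Editor's note: the ErrImagePull entries above carry a gRPC status, "rpc error: code = Canceled desc = copying config: context canceled", and the follow-up entry shows the same container moving to ImagePullBackOff. A minimal Go sketch of classifying such a CRI pull error with the gRPC status package (pullImage and the classification policy are illustrative, not kubelet source):

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// pullImage stands in for a CRI ImageService call; it fails the way the log
// shows, with a Canceled status produced from a canceled context.
func pullImage(ctx context.Context, image string) error {
	<-ctx.Done()
	return status.Error(codes.Canceled, "copying config: context canceled")
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // simulate the pull being canceled mid-copy

	err := pullImage(ctx, "quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified")
	switch status.Code(err) {
	case codes.Canceled, codes.DeadlineExceeded:
		// Transient: surface a pull error now; repeated failures back off.
		fmt.Println("transient pull failure, will back off:", err)
	case codes.NotFound:
		fmt.Println("image does not exist:", err)
	default:
		fmt.Println("unexpected pull failure:", err)
	}
}
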
pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579262 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.579278 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw7gj\" (UniqueName: \"kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.652474 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.653954 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.657234 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.675417 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680817 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680860 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680910 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680960 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680980 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.680995 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.681012 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw7gj\" (UniqueName: \"kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.681716 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.687415 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.692575 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.693947 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.697143 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.697195 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.710858 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw7gj\" (UniqueName: \"kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.744525 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.782408 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785590 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6qt5\" (UniqueName: \"kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785668 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785701 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785777 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785818 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.785859 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.847626 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887344 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887432 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887537 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887573 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887644 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887700 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.887727 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6qt5\" (UniqueName: \"kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.888475 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.888806 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.889837 4603 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.895400 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.900937 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.904723 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.905469 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6qt5\" (UniqueName: \"kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.936094 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:27 crc kubenswrapper[4603]: I0930 20:06:27.990050 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.015149 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9k8jw"] Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.063147 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:06:28 crc kubenswrapper[4603]: W0930 20:06:28.071756 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f561a20_3add_4fea_88e3_15e16af5d2d3.slice/crio-e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72 WatchSource:0}: Error finding container e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72: Status 404 returned error can't find the container with id e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72 Sep 30 20:06:28 crc kubenswrapper[4603]: W0930 20:06:28.133720 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddae67246_73b8_4810_9f01_2dde949d65ba.slice/crio-70cf6adbddf97d7cf6520db7f6fa758f916bbc893c6842bec5b23879b398e64f WatchSource:0}: Error finding container 70cf6adbddf97d7cf6520db7f6fa758f916bbc893c6842bec5b23879b398e64f: Status 404 returned error can't find the container with id 70cf6adbddf97d7cf6520db7f6fa758f916bbc893c6842bec5b23879b398e64f Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.296368 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9k8jw" event={"ID":"9f561a20-3add-4fea-88e3-15e16af5d2d3","Type":"ContainerStarted","Data":"e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72"} Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.306792 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" event={"ID":"dae67246-73b8-4810-9f01-2dde949d65ba","Type":"ContainerStarted","Data":"70cf6adbddf97d7cf6520db7f6fa758f916bbc893c6842bec5b23879b398e64f"} Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.317284 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2m748" event={"ID":"be703fd8-b3d7-4462-a905-5a835f8e2125","Type":"ContainerStarted","Data":"86a68997eaac56d0e2ed27db846e648bfd83959a62837c3b92915317593f148c"} Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.322813 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" event={"ID":"96a738e3-111f-46ca-846b-b9f28284e84c","Type":"ContainerStarted","Data":"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1"} Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.322957 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="dnsmasq-dns" containerID="cri-o://9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1" gracePeriod=10 Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.323139 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.350047 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-2m748" podStartSLOduration=3.619806883 podStartE2EDuration="51.350028935s" podCreationTimestamp="2025-09-30 20:05:37 +0000 
UTC" firstStartedPulling="2025-09-30 20:05:39.179656704 +0000 UTC m=+1141.118115522" lastFinishedPulling="2025-09-30 20:06:26.909878756 +0000 UTC m=+1188.848337574" observedRunningTime="2025-09-30 20:06:28.34302453 +0000 UTC m=+1190.281483338" watchObservedRunningTime="2025-09-30 20:06:28.350028935 +0000 UTC m=+1190.288487753" Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.371431 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" podStartSLOduration=44.371411487 podStartE2EDuration="44.371411487s" podCreationTimestamp="2025-09-30 20:05:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:28.359341273 +0000 UTC m=+1190.297800091" watchObservedRunningTime="2025-09-30 20:06:28.371411487 +0000 UTC m=+1190.309870305" Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.498660 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:06:28 crc kubenswrapper[4603]: W0930 20:06:28.507564 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf634b0f2_24f3_44d5_b6d8_2e1da4bc01e8.slice/crio-ca217011e5c07b9507b65aba6bef99ad9164b7d6f262fc56585d0d22bce5dd8a WatchSource:0}: Error finding container ca217011e5c07b9507b65aba6bef99ad9164b7d6f262fc56585d0d22bce5dd8a: Status 404 returned error can't find the container with id ca217011e5c07b9507b65aba6bef99ad9164b7d6f262fc56585d0d22bce5dd8a Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.711624 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:28 crc kubenswrapper[4603]: W0930 20:06:28.737688 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod284f7d9c_1218_488a_af80_1f37c7d219d2.slice/crio-f0ae1bfd8938b372a4c57c7501f5e12cae318d28a5ad613b81942745dd4912bd WatchSource:0}: Error finding container f0ae1bfd8938b372a4c57c7501f5e12cae318d28a5ad613b81942745dd4912bd: Status 404 returned error can't find the container with id f0ae1bfd8938b372a4c57c7501f5e12cae318d28a5ad613b81942745dd4912bd Sep 30 20:06:28 crc kubenswrapper[4603]: I0930 20:06:28.871608 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.006571 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.006844 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.007081 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.007123 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.007228 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn2rl\" (UniqueName: \"kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.007317 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0\") pod \"96a738e3-111f-46ca-846b-b9f28284e84c\" (UID: \"96a738e3-111f-46ca-846b-b9f28284e84c\") " Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.037490 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl" (OuterVolumeSpecName: "kube-api-access-hn2rl") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "kube-api-access-hn2rl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.135807 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn2rl\" (UniqueName: \"kubernetes.io/projected/96a738e3-111f-46ca-846b-b9f28284e84c-kube-api-access-hn2rl\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.165823 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.169854 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.212880 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.242463 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.242499 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.242512 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.251743 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config" (OuterVolumeSpecName: "config") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.257486 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "96a738e3-111f-46ca-846b-b9f28284e84c" (UID: "96a738e3-111f-46ca-846b-b9f28284e84c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.340238 4603 generic.go:334] "Generic (PLEG): container finished" podID="dae67246-73b8-4810-9f01-2dde949d65ba" containerID="ed97a6d79923e1e447bd548ca14142dad9a6e73fadf29edc20d53d96a5ff50d3" exitCode=0 Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.340328 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" event={"ID":"dae67246-73b8-4810-9f01-2dde949d65ba","Type":"ContainerDied","Data":"ed97a6d79923e1e447bd548ca14142dad9a6e73fadf29edc20d53d96a5ff50d3"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.344470 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.344501 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a738e3-111f-46ca-846b-b9f28284e84c-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.357884 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerStarted","Data":"f0ae1bfd8938b372a4c57c7501f5e12cae318d28a5ad613b81942745dd4912bd"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.390933 4603 generic.go:334] "Generic (PLEG): container finished" podID="96a738e3-111f-46ca-846b-b9f28284e84c" containerID="9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1" exitCode=0 Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.391123 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" event={"ID":"96a738e3-111f-46ca-846b-b9f28284e84c","Type":"ContainerDied","Data":"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.391155 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" event={"ID":"96a738e3-111f-46ca-846b-b9f28284e84c","Type":"ContainerDied","Data":"1482e19bbac6c1925acd4a9cc3ceb7a27810f95baa3f4dfa223acdc2b4e4a032"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.391267 4603 scope.go:117] "RemoveContainer" containerID="9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.392186 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-sfsw5" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.427489 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerStarted","Data":"ca217011e5c07b9507b65aba6bef99ad9164b7d6f262fc56585d0d22bce5dd8a"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.461118 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9k8jw" event={"ID":"9f561a20-3add-4fea-88e3-15e16af5d2d3","Type":"ContainerStarted","Data":"74677c69850fc7b0c850d1d10d905fcb3f41f9a9c3f0ce990525d54958d10128"} Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.500530 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9k8jw" podStartSLOduration=11.50050181 podStartE2EDuration="11.50050181s" podCreationTimestamp="2025-09-30 20:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:29.485597267 +0000 UTC m=+1191.424056085" watchObservedRunningTime="2025-09-30 20:06:29.50050181 +0000 UTC m=+1191.438960628" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.531647 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.551902 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-sfsw5"] Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.564831 4603 scope.go:117] "RemoveContainer" containerID="316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.644886 4603 scope.go:117] "RemoveContainer" containerID="9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1" Sep 30 20:06:29 crc kubenswrapper[4603]: E0930 20:06:29.645480 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1\": container with ID starting with 9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1 not found: ID does not exist" containerID="9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.645515 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1"} err="failed to get container status \"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1\": rpc error: code = NotFound desc = could not find container \"9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1\": container with ID starting with 9fdae6deefaf31297fbac0d28cb1a1d70a02ee95b564fde1d2abc8cdd41341b1 not found: ID does not exist" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.645536 4603 scope.go:117] "RemoveContainer" containerID="316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5" Sep 30 20:06:29 crc kubenswrapper[4603]: E0930 20:06:29.646177 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5\": container with ID starting with 316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5 not found: ID 
does not exist" containerID="316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5" Sep 30 20:06:29 crc kubenswrapper[4603]: I0930 20:06:29.646203 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5"} err="failed to get container status \"316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5\": rpc error: code = NotFound desc = could not find container \"316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5\": container with ID starting with 316c9aca24555392c3456ae2c2de45e32cb82abdd5534b5b34e798081e3314e5 not found: ID does not exist" Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.069513 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.240640 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.472322 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" event={"ID":"dae67246-73b8-4810-9f01-2dde949d65ba","Type":"ContainerStarted","Data":"40d7b23316535d85069ccbfeaf2b76db34b5ca08d18274a44fdc88ebc4543a33"} Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.473881 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.481005 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerStarted","Data":"3892da308ac04b2b3297f7cc86df5573b9aae100db6996bd230fec7832162cb5"} Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.500638 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" podStartSLOduration=4.500617515 podStartE2EDuration="4.500617515s" podCreationTimestamp="2025-09-30 20:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:30.491788331 +0000 UTC m=+1192.430247149" watchObservedRunningTime="2025-09-30 20:06:30.500617515 +0000 UTC m=+1192.439076333" Sep 30 20:06:30 crc kubenswrapper[4603]: I0930 20:06:30.788959 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" path="/var/lib/kubelet/pods/96a738e3-111f-46ca-846b-b9f28284e84c/volumes" Sep 30 20:06:31 crc kubenswrapper[4603]: I0930 20:06:31.495976 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerStarted","Data":"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77"} Sep 30 20:06:34 crc kubenswrapper[4603]: I0930 20:06:34.529243 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerStarted","Data":"cae7bf33a0fdf925c54c65b63ed05cb4d66be0c6a62d407f6423a50f82dc57f8"} Sep 30 20:06:34 crc kubenswrapper[4603]: I0930 20:06:34.529542 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-log" 
containerID="cri-o://3892da308ac04b2b3297f7cc86df5573b9aae100db6996bd230fec7832162cb5" gracePeriod=30 Sep 30 20:06:34 crc kubenswrapper[4603]: I0930 20:06:34.529645 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-httpd" containerID="cri-o://cae7bf33a0fdf925c54c65b63ed05cb4d66be0c6a62d407f6423a50f82dc57f8" gracePeriod=30 Sep 30 20:06:34 crc kubenswrapper[4603]: I0930 20:06:34.551316 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.551301581 podStartE2EDuration="8.551301581s" podCreationTimestamp="2025-09-30 20:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:34.549852661 +0000 UTC m=+1196.488311479" watchObservedRunningTime="2025-09-30 20:06:34.551301581 +0000 UTC m=+1196.489760399" Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.540323 4603 generic.go:334] "Generic (PLEG): container finished" podID="be703fd8-b3d7-4462-a905-5a835f8e2125" containerID="86a68997eaac56d0e2ed27db846e648bfd83959a62837c3b92915317593f148c" exitCode=0 Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.540486 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2m748" event={"ID":"be703fd8-b3d7-4462-a905-5a835f8e2125","Type":"ContainerDied","Data":"86a68997eaac56d0e2ed27db846e648bfd83959a62837c3b92915317593f148c"} Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.552742 4603 generic.go:334] "Generic (PLEG): container finished" podID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerID="cae7bf33a0fdf925c54c65b63ed05cb4d66be0c6a62d407f6423a50f82dc57f8" exitCode=143 Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.552790 4603 generic.go:334] "Generic (PLEG): container finished" podID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerID="3892da308ac04b2b3297f7cc86df5573b9aae100db6996bd230fec7832162cb5" exitCode=143 Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.552824 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerDied","Data":"cae7bf33a0fdf925c54c65b63ed05cb4d66be0c6a62d407f6423a50f82dc57f8"} Sep 30 20:06:35 crc kubenswrapper[4603]: I0930 20:06:35.552860 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerDied","Data":"3892da308ac04b2b3297f7cc86df5573b9aae100db6996bd230fec7832162cb5"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.416925 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503634 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503711 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw7gj\" (UniqueName: \"kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503774 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503832 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503965 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.503996 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.504088 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs\") pod \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\" (UID: \"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8\") " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.505545 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.507284 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs" (OuterVolumeSpecName: "logs") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.510476 4603 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.510508 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.513733 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.514511 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj" (OuterVolumeSpecName: "kube-api-access-sw7gj") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "kube-api-access-sw7gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.516479 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts" (OuterVolumeSpecName: "scripts") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.542963 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.562566 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data" (OuterVolumeSpecName: "config-data") pod "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" (UID: "f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.571373 4603 generic.go:334] "Generic (PLEG): container finished" podID="9f561a20-3add-4fea-88e3-15e16af5d2d3" containerID="74677c69850fc7b0c850d1d10d905fcb3f41f9a9c3f0ce990525d54958d10128" exitCode=0 Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.571576 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9k8jw" event={"ID":"9f561a20-3add-4fea-88e3-15e16af5d2d3","Type":"ContainerDied","Data":"74677c69850fc7b0c850d1d10d905fcb3f41f9a9c3f0ce990525d54958d10128"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.576628 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerStarted","Data":"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.578080 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerStarted","Data":"d227ec12278192d402b46511d88a32c1222c27a3008ee64c6eab74835c605780"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.580208 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerStarted","Data":"8d002b92e3d74d3d7e892e05a13ecb49ba4438c9d0c3d1ad87f0336586cd4f40"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.582009 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.583019 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8","Type":"ContainerDied","Data":"ca217011e5c07b9507b65aba6bef99ad9164b7d6f262fc56585d0d22bce5dd8a"} Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.583083 4603 scope.go:117] "RemoveContainer" containerID="cae7bf33a0fdf925c54c65b63ed05cb4d66be0c6a62d407f6423a50f82dc57f8" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.611715 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.611772 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.611784 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.611793 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw7gj\" (UniqueName: \"kubernetes.io/projected/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-kube-api-access-sw7gj\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.611802 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:36 
crc kubenswrapper[4603]: I0930 20:06:36.650265 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.659478 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.663457 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.670956 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.672751 4603 scope.go:117] "RemoveContainer" containerID="3892da308ac04b2b3297f7cc86df5573b9aae100db6996bd230fec7832162cb5"
Sep 30 20:06:36 crc kubenswrapper[4603]: E0930 20:06:36.672811 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-log"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.672827 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-log"
Sep 30 20:06:36 crc kubenswrapper[4603]: E0930 20:06:36.672840 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="dnsmasq-dns"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.672845 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="dnsmasq-dns"
Sep 30 20:06:36 crc kubenswrapper[4603]: E0930 20:06:36.672866 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="init"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.672872 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="init"
Sep 30 20:06:36 crc kubenswrapper[4603]: E0930 20:06:36.672926 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-httpd"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.672933 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-httpd"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.673108 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-log"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.673123 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" containerName="glance-httpd"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.673140 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a738e3-111f-46ca-846b-b9f28284e84c" containerName="dnsmasq-dns"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.674100 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.676801 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.677065 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.677071 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.713346 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.818030 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.818375 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.818417 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.818448 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.819843 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8" path="/var/lib/kubelet/pods/f634b0f2-24f3-44d5-b6d8-2e1da4bc01e8/volumes"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.821101 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tknzx\" (UniqueName: \"kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.822798 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.825346 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.825595 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927154 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tknzx\" (UniqueName: \"kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927266 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927312 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927365 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927446 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927474 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927517 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.927543 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.930332 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.939760 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.945652 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.947806 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.966745 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tknzx\" (UniqueName: \"kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.968302 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.974087 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.983158 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:36 crc kubenswrapper[4603]: I0930 20:06:36.996142 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.010660 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " pod="openstack/glance-default-external-api-0"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.054662 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"]
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.055051 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="dnsmasq-dns" containerID="cri-o://e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe" gracePeriod=10
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.122779 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2m748"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.244828 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts\") pod \"be703fd8-b3d7-4462-a905-5a835f8e2125\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.244898 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d44z7\" (UniqueName: \"kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7\") pod \"be703fd8-b3d7-4462-a905-5a835f8e2125\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.245005 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data\") pod \"be703fd8-b3d7-4462-a905-5a835f8e2125\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.245124 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle\") pod \"be703fd8-b3d7-4462-a905-5a835f8e2125\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.245154 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs\") pod \"be703fd8-b3d7-4462-a905-5a835f8e2125\" (UID: \"be703fd8-b3d7-4462-a905-5a835f8e2125\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.245652 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs" (OuterVolumeSpecName: "logs") pod "be703fd8-b3d7-4462-a905-5a835f8e2125" (UID: "be703fd8-b3d7-4462-a905-5a835f8e2125"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.256319 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts" (OuterVolumeSpecName: "scripts") pod "be703fd8-b3d7-4462-a905-5a835f8e2125" (UID: "be703fd8-b3d7-4462-a905-5a835f8e2125"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.256550 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7" (OuterVolumeSpecName: "kube-api-access-d44z7") pod "be703fd8-b3d7-4462-a905-5a835f8e2125" (UID: "be703fd8-b3d7-4462-a905-5a835f8e2125"). InnerVolumeSpecName "kube-api-access-d44z7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.296679 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be703fd8-b3d7-4462-a905-5a835f8e2125" (UID: "be703fd8-b3d7-4462-a905-5a835f8e2125"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.311377 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.326383 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data" (OuterVolumeSpecName: "config-data") pod "be703fd8-b3d7-4462-a905-5a835f8e2125" (UID: "be703fd8-b3d7-4462-a905-5a835f8e2125"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.346914 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d44z7\" (UniqueName: \"kubernetes.io/projected/be703fd8-b3d7-4462-a905-5a835f8e2125-kube-api-access-d44z7\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.347112 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.347189 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.347244 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be703fd8-b3d7-4462-a905-5a835f8e2125-logs\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.347292 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be703fd8-b3d7-4462-a905-5a835f8e2125-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.530133 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.614351 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerStarted","Data":"f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.621973 4603 generic.go:334] "Generic (PLEG): container finished" podID="b7487171-f64a-433c-b167-e757a12c60d6" containerID="e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe" exitCode=0
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.622028 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" event={"ID":"b7487171-f64a-433c-b167-e757a12c60d6","Type":"ContainerDied","Data":"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.622054 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55" event={"ID":"b7487171-f64a-433c-b167-e757a12c60d6","Type":"ContainerDied","Data":"8fb979d1f9cd18b3407865eb95dfda3a80657c00e9bf1e9db7c6087cb9382fbd"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.622068 4603 scope.go:117] "RemoveContainer" containerID="e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.622217 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6rp55"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.640116 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7cc565dc7d-zt9pz" podStartSLOduration=3.246286852 podStartE2EDuration="52.640095431s" podCreationTimestamp="2025-09-30 20:05:45 +0000 UTC" firstStartedPulling="2025-09-30 20:05:46.772231613 +0000 UTC m=+1148.710690431" lastFinishedPulling="2025-09-30 20:06:36.166040192 +0000 UTC m=+1198.104499010" observedRunningTime="2025-09-30 20:06:37.637237132 +0000 UTC m=+1199.575695950" watchObservedRunningTime="2025-09-30 20:06:37.640095431 +0000 UTC m=+1199.578554239"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.645537 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerStarted","Data":"ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.653534 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb\") pod \"b7487171-f64a-433c-b167-e757a12c60d6\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.653848 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config\") pod \"b7487171-f64a-433c-b167-e757a12c60d6\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.653941 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb\") pod \"b7487171-f64a-433c-b167-e757a12c60d6\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.654296 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74nts\" (UniqueName: \"kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts\") pod \"b7487171-f64a-433c-b167-e757a12c60d6\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.654556 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc\") pod \"b7487171-f64a-433c-b167-e757a12c60d6\" (UID: \"b7487171-f64a-433c-b167-e757a12c60d6\") "
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.660917 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts" (OuterVolumeSpecName: "kube-api-access-74nts") pod "b7487171-f64a-433c-b167-e757a12c60d6" (UID: "b7487171-f64a-433c-b167-e757a12c60d6"). InnerVolumeSpecName "kube-api-access-74nts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.661125 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerStarted","Data":"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.661489 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-log" containerID="cri-o://963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77" gracePeriod=30
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.662475 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-httpd" containerID="cri-o://f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d" gracePeriod=30
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.662586 4603 scope.go:117] "RemoveContainer" containerID="a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.718480 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8575cd6744-wt57f" podStartSLOduration=5.6225014380000005 podStartE2EDuration="52.718461955s" podCreationTimestamp="2025-09-30 20:05:45 +0000 UTC" firstStartedPulling="2025-09-30 20:05:46.748721348 +0000 UTC m=+1148.687180166" lastFinishedPulling="2025-09-30 20:06:33.844681865 +0000 UTC m=+1195.783140683" observedRunningTime="2025-09-30 20:06:37.678337352 +0000 UTC m=+1199.616796170" watchObservedRunningTime="2025-09-30 20:06:37.718461955 +0000 UTC m=+1199.656920773"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.719390 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2m748"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.721039 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2m748" event={"ID":"be703fd8-b3d7-4462-a905-5a835f8e2125","Type":"ContainerDied","Data":"6eed94f661bb8a763b6b2de3823cad8fc413fb90ea7d5b38077ce76f55ec8cd9"}
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.721078 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6eed94f661bb8a763b6b2de3823cad8fc413fb90ea7d5b38077ce76f55ec8cd9"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.754482 4603 scope.go:117] "RemoveContainer" containerID="e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.757602 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74nts\" (UniqueName: \"kubernetes.io/projected/b7487171-f64a-433c-b167-e757a12c60d6-kube-api-access-74nts\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: E0930 20:06:37.760317 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe\": container with ID starting with e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe not found: ID does not exist" containerID="e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.760380 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe"} err="failed to get container status \"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe\": rpc error: code = NotFound desc = could not find container \"e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe\": container with ID starting with e960440da4610537121d5aafaffacb2babe8de625f4ce4438bb6fa916d465dfe not found: ID does not exist"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.760432 4603 scope.go:117] "RemoveContainer" containerID="a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.760563 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c9d79bbb8-vkl5v"]
Sep 30 20:06:37 crc kubenswrapper[4603]: E0930 20:06:37.761831 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be703fd8-b3d7-4462-a905-5a835f8e2125" containerName="placement-db-sync"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.761849 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="be703fd8-b3d7-4462-a905-5a835f8e2125" containerName="placement-db-sync"
Sep 30 20:06:37 crc kubenswrapper[4603]: E0930 20:06:37.761880 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="init"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.761910 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="init"
Sep 30 20:06:37 crc kubenswrapper[4603]: E0930 20:06:37.761922 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="dnsmasq-dns"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.761931 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="dnsmasq-dns"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.762463 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7487171-f64a-433c-b167-e757a12c60d6" containerName="dnsmasq-dns"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.765432 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="be703fd8-b3d7-4462-a905-5a835f8e2125" containerName="placement-db-sync"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.771144 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: E0930 20:06:37.777934 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819\": container with ID starting with a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819 not found: ID does not exist" containerID="a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.777987 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819"} err="failed to get container status \"a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819\": rpc error: code = NotFound desc = could not find container \"a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819\": container with ID starting with a90e8c4715b5cb3275f91d9252b61bd5df36b684acdd4fdb84fc02a8ba8eb819 not found: ID does not exist"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.781148 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.781702 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.781813 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.782101 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-cl7gf"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.782230 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.813764 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c9d79bbb8-vkl5v"]
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.815727 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.815714652 podStartE2EDuration="11.815714652s" podCreationTimestamp="2025-09-30 20:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:37.720990395 +0000 UTC m=+1199.659449213" watchObservedRunningTime="2025-09-30 20:06:37.815714652 +0000 UTC m=+1199.754173470"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.833685 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b7487171-f64a-433c-b167-e757a12c60d6" (UID: "b7487171-f64a-433c-b167-e757a12c60d6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.850069 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7487171-f64a-433c-b167-e757a12c60d6" (UID: "b7487171-f64a-433c-b167-e757a12c60d6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861677 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-combined-ca-bundle\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861708 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r74w\" (UniqueName: \"kubernetes.io/projected/866dea6a-1003-486a-9893-5ede909f55dd-kube-api-access-9r74w\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861801 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-config-data\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861820 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-scripts\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861841 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/866dea6a-1003-486a-9893-5ede909f55dd-logs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861871 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-internal-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861888 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-public-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861960 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.861969 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.889994 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b7487171-f64a-433c-b167-e757a12c60d6" (UID: "b7487171-f64a-433c-b167-e757a12c60d6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.901340 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.917330 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config" (OuterVolumeSpecName: "config") pod "b7487171-f64a-433c-b167-e757a12c60d6" (UID: "b7487171-f64a-433c-b167-e757a12c60d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.963546 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-config-data\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.963850 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-scripts\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.963875 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/866dea6a-1003-486a-9893-5ede909f55dd-logs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.963944 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-internal-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.963962 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-public-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.964019 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-combined-ca-bundle\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.964041 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r74w\" (UniqueName: \"kubernetes.io/projected/866dea6a-1003-486a-9893-5ede909f55dd-kube-api-access-9r74w\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.964102 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.964112 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7487171-f64a-433c-b167-e757a12c60d6-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.965727 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"]
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.967638 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/866dea6a-1003-486a-9893-5ede909f55dd-logs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.968834 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-combined-ca-bundle\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.969299 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-scripts\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.969828 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6rp55"]
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.972262 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-config-data\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.972628 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-public-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:37 crc kubenswrapper[4603]: I0930 20:06:37.974046 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/866dea6a-1003-486a-9893-5ede909f55dd-internal-tls-certs\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.021437 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r74w\" (UniqueName: \"kubernetes.io/projected/866dea6a-1003-486a-9893-5ede909f55dd-kube-api-access-9r74w\") pod \"placement-c9d79bbb8-vkl5v\" (UID: \"866dea6a-1003-486a-9893-5ede909f55dd\") " pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.127770 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c9d79bbb8-vkl5v"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.332342 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9k8jw"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474004 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474175 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474275 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474295 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nvc7\" (UniqueName: \"kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474312 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.474340 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys\") pod \"9f561a20-3add-4fea-88e3-15e16af5d2d3\" (UID: \"9f561a20-3add-4fea-88e3-15e16af5d2d3\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.483298 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.484112 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts" (OuterVolumeSpecName: "scripts") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.487280 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.491364 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7" (OuterVolumeSpecName: "kube-api-access-7nvc7") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "kube-api-access-7nvc7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.531766 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data" (OuterVolumeSpecName: "config-data") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.576309 4603 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.576337 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nvc7\" (UniqueName: \"kubernetes.io/projected/9f561a20-3add-4fea-88e3-15e16af5d2d3-kube-api-access-7nvc7\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.576348 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.576356 4603 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-credential-keys\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.576364 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.602065 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f561a20-3add-4fea-88e3-15e16af5d2d3" (UID: "9f561a20-3add-4fea-88e3-15e16af5d2d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.605669 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c9d79bbb8-vkl5v"]
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.644984 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.677722 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f561a20-3add-4fea-88e3-15e16af5d2d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.756026 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerStarted","Data":"1a078c00d2d665a1e293f8362337b04a4c0a16a8531bfc386d4869264a9f9c0e"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.756067 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerStarted","Data":"a3cd06c2e01ad71710ca659783ec6ba5f480dcc26d3e78b3187d33d6f9c3b9a0"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.759801 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8bd66565b-k2wg7"]
Sep 30 20:06:38 crc kubenswrapper[4603]: E0930 20:06:38.766800 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f561a20-3add-4fea-88e3-15e16af5d2d3" containerName="keystone-bootstrap"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.766834 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f561a20-3add-4fea-88e3-15e16af5d2d3" containerName="keystone-bootstrap"
Sep 30 20:06:38 crc kubenswrapper[4603]: E0930 20:06:38.766856 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-log"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.766862 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-log"
Sep 30 20:06:38 crc kubenswrapper[4603]: E0930 20:06:38.766877 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-httpd"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.766885 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-httpd"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.767156 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-httpd"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.767185 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerName="glance-log"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.767199 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f561a20-3add-4fea-88e3-15e16af5d2d3" containerName="keystone-bootstrap"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.778111 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.778824 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6qt5\" (UniqueName: \"kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.778958 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.779095 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.779280 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.780709 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.780854 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts\") pod \"284f7d9c-1218-488a-af80-1f37c7d219d2\" (UID: \"284f7d9c-1218-488a-af80-1f37c7d219d2\") "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.779800 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.786933 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.788110 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9k8jw"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.790962 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs" (OuterVolumeSpecName: "logs") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.797642 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts" (OuterVolumeSpecName: "scripts") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.797800 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.798717 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.798822 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.798935 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.799003 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.799466 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hqmtj"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.803091 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7487171-f64a-433c-b167-e757a12c60d6" path="/var/lib/kubelet/pods/b7487171-f64a-433c-b167-e757a12c60d6/volumes"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.808144 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.808462 4603 generic.go:334] "Generic (PLEG): container finished" podID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerID="f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d" exitCode=0
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.811584 4603 generic.go:334] "Generic (PLEG): container finished" podID="284f7d9c-1218-488a-af80-1f37c7d219d2" containerID="963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77" exitCode=143
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.808527 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.812572 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5" (OuterVolumeSpecName: "kube-api-access-z6qt5") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "kube-api-access-z6qt5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.832925 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.880444 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data" (OuterVolumeSpecName: "config-data") pod "284f7d9c-1218-488a-af80-1f37c7d219d2" (UID: "284f7d9c-1218-488a-af80-1f37c7d219d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.882961 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-internal-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.883020 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-scripts\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.883068 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-config-data\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.885694 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx9qb\" (UniqueName: \"kubernetes.io/projected/614e45f1-3173-4eb1-8b47-56760f3468f4-kube-api-access-jx9qb\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.885813 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-combined-ca-bundle\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.885981 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-credential-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.889141 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-public-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.889458 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-fernet-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895496 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8bd66565b-k2wg7"]
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895536 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9k8jw" event={"ID":"9f561a20-3add-4fea-88e3-15e16af5d2d3","Type":"ContainerDied","Data":"e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895557 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4180e1899a6bd7f340d311d06ab26f12c580508f49d4a1415e9b3ea8ad9cd72"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895568 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerDied","Data":"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895581 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerDied","Data":"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895590 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"284f7d9c-1218-488a-af80-1f37c7d219d2","Type":"ContainerDied","Data":"f0ae1bfd8938b372a4c57c7501f5e12cae318d28a5ad613b81942745dd4912bd"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895599 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c9d79bbb8-vkl5v" event={"ID":"866dea6a-1003-486a-9893-5ede909f55dd","Type":"ContainerStarted","Data":"55f53914cd2f0b0b13552bb89fc80dc8650b4dd94d0d5b519717a6f11397e51f"}
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895614 4603 scope.go:117] "RemoveContainer" containerID="f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895921 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-logs\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895949 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895960 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895969 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895978 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f7d9c-1218-488a-af80-1f37c7d219d2-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895987 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6qt5\" (UniqueName: \"kubernetes.io/projected/284f7d9c-1218-488a-af80-1f37c7d219d2-kube-api-access-z6qt5\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.895996 4603 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/284f7d9c-1218-488a-af80-1f37c7d219d2-httpd-run\") on node \"crc\" DevicePath \"\""
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.923027 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Sep 30 20:06:38 crc kubenswrapper[4603]: I0930 20:06:38.949378 4603 scope.go:117] "RemoveContainer" containerID="963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.000531 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-public-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.000718 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-fernet-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.000892 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-internal-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.000930 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-scripts\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.001012 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-config-data\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7"
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.001077 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx9qb\" (UniqueName: \"kubernetes.io/projected/614e45f1-3173-4eb1-8b47-56760f3468f4-kube-api-access-jx9qb\") pod
\"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.001125 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-combined-ca-bundle\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.001307 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-credential-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.001463 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.006715 4603 scope.go:117] "RemoveContainer" containerID="f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.012312 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-scripts\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.012467 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-credential-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.022402 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-combined-ca-bundle\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.022773 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-public-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: E0930 20:06:39.022866 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d\": container with ID starting with f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d not found: ID does not exist" containerID="f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.022889 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d"} err="failed to get container status 
\"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d\": rpc error: code = NotFound desc = could not find container \"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d\": container with ID starting with f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d not found: ID does not exist" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.022918 4603 scope.go:117] "RemoveContainer" containerID="963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.024901 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-config-data\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.026947 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-internal-tls-certs\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: E0930 20:06:39.027243 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77\": container with ID starting with 963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77 not found: ID does not exist" containerID="963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.027424 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77"} err="failed to get container status \"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77\": rpc error: code = NotFound desc = could not find container \"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77\": container with ID starting with 963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77 not found: ID does not exist" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.027502 4603 scope.go:117] "RemoveContainer" containerID="f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.028044 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx9qb\" (UniqueName: \"kubernetes.io/projected/614e45f1-3173-4eb1-8b47-56760f3468f4-kube-api-access-jx9qb\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.028239 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/614e45f1-3173-4eb1-8b47-56760f3468f4-fernet-keys\") pod \"keystone-8bd66565b-k2wg7\" (UID: \"614e45f1-3173-4eb1-8b47-56760f3468f4\") " pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.028390 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d"} err="failed to get container status 
\"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d\": rpc error: code = NotFound desc = could not find container \"f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d\": container with ID starting with f130ba42f90fd071e737277a57da1f57f541e7b557b973108c0614d89429936d not found: ID does not exist" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.028557 4603 scope.go:117] "RemoveContainer" containerID="963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.028998 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77"} err="failed to get container status \"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77\": rpc error: code = NotFound desc = could not find container \"963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77\": container with ID starting with 963d6da53d64dc4cc54d1efe589007c00f5a63c87555bce316d0b7c78593ab77 not found: ID does not exist" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.203383 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.256498 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.271004 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.282004 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.286499 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.288545 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.288753 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.301446 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412254 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412291 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412318 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mt8b\" (UniqueName: \"kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412341 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412381 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412413 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412439 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.412481 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.513921 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.514230 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.514275 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.514901 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.514574 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.515720 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.515846 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.515873 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mt8b\" (UniqueName: \"kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.515912 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.516122 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.516270 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.531127 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.531794 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.532263 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.532618 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.539053 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mt8b\" (UniqueName: \"kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.626330 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.855377 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t94b6" event={"ID":"49895f67-376c-4c08-9382-18aee2212e04","Type":"ContainerStarted","Data":"84c7df746db3444cf2d62ebc0a0db4f41e236efaa9b46d776eb254476f881a9f"} 
Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.868863 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c9d79bbb8-vkl5v" event={"ID":"866dea6a-1003-486a-9893-5ede909f55dd","Type":"ContainerStarted","Data":"b9e6136f83ee78ad667c937e3b940ee3b94c75f7484016e71dbd2f03adae2d66"} Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.868903 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c9d79bbb8-vkl5v" event={"ID":"866dea6a-1003-486a-9893-5ede909f55dd","Type":"ContainerStarted","Data":"70301bde48b6d29ff3705bc68b613e889fbac7fdd7750a99cb6ce829a5e1a2fb"} Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.869593 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-c9d79bbb8-vkl5v" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.869620 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-c9d79bbb8-vkl5v" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.875537 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-t94b6" podStartSLOduration=3.402608768 podStartE2EDuration="1m2.875523736s" podCreationTimestamp="2025-09-30 20:05:37 +0000 UTC" firstStartedPulling="2025-09-30 20:05:39.334314121 +0000 UTC m=+1141.272772929" lastFinishedPulling="2025-09-30 20:06:38.807229079 +0000 UTC m=+1200.745687897" observedRunningTime="2025-09-30 20:06:39.874536228 +0000 UTC m=+1201.812995046" watchObservedRunningTime="2025-09-30 20:06:39.875523736 +0000 UTC m=+1201.813982554" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.886661 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerStarted","Data":"39f4e803dadc6f72a90b2534271c9f093ed0bfc46c7771ee64238291b8213dbd"} Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.900039 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-c9d79bbb8-vkl5v" podStartSLOduration=2.900021015 podStartE2EDuration="2.900021015s" podCreationTimestamp="2025-09-30 20:06:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:39.888847385 +0000 UTC m=+1201.827306203" watchObservedRunningTime="2025-09-30 20:06:39.900021015 +0000 UTC m=+1201.838479833" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.910832 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.910812445 podStartE2EDuration="3.910812445s" podCreationTimestamp="2025-09-30 20:06:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:39.908215682 +0000 UTC m=+1201.846674490" watchObservedRunningTime="2025-09-30 20:06:39.910812445 +0000 UTC m=+1201.849271263" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.915736 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:39 crc kubenswrapper[4603]: I0930 20:06:39.968848 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8bd66565b-k2wg7"] Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.629239 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:06:40 crc kubenswrapper[4603]: W0930 20:06:40.647894 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b9cb86d_897c_445a_bdc1_96aee550d17d.slice/crio-cb2aeefd75959497e63e3ac0f13b2ee27a7d10a8ae16ff1db181191ac73c664f WatchSource:0}: Error finding container cb2aeefd75959497e63e3ac0f13b2ee27a7d10a8ae16ff1db181191ac73c664f: Status 404 returned error can't find the container with id cb2aeefd75959497e63e3ac0f13b2ee27a7d10a8ae16ff1db181191ac73c664f Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.783495 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284f7d9c-1218-488a-af80-1f37c7d219d2" path="/var/lib/kubelet/pods/284f7d9c-1218-488a-af80-1f37c7d219d2/volumes" Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.903794 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerStarted","Data":"cb2aeefd75959497e63e3ac0f13b2ee27a7d10a8ae16ff1db181191ac73c664f"} Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.906377 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8bd66565b-k2wg7" event={"ID":"614e45f1-3173-4eb1-8b47-56760f3468f4","Type":"ContainerStarted","Data":"627e4e039fd50a5edc9a1dd119f3e405132096cf605c9118730072cbb0cf60fd"} Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.906442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8bd66565b-k2wg7" event={"ID":"614e45f1-3173-4eb1-8b47-56760f3468f4","Type":"ContainerStarted","Data":"aa30d3033e745b0acd2dcbbf821c60edf680dc87c68a202df67ebb9e1957fc76"} Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.906530 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.911202 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cgrnb" event={"ID":"7a87079f-7cb1-447c-a950-bb204031afce","Type":"ContainerStarted","Data":"785a9a7fe6aa5b441c87196db57d1d4042df017621d17b20a4b96b1b0cb8362f"} Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.922085 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-8bd66565b-k2wg7" podStartSLOduration=2.922070619 podStartE2EDuration="2.922070619s" podCreationTimestamp="2025-09-30 20:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:40.921521683 +0000 UTC m=+1202.859980501" watchObservedRunningTime="2025-09-30 20:06:40.922070619 +0000 UTC m=+1202.860529437" Sep 30 20:06:40 crc kubenswrapper[4603]: I0930 20:06:40.948892 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-cgrnb" podStartSLOduration=4.351563103 podStartE2EDuration="1m4.948869562s" podCreationTimestamp="2025-09-30 20:05:36 +0000 UTC" firstStartedPulling="2025-09-30 20:05:38.991257849 +0000 UTC m=+1140.929716657" 
lastFinishedPulling="2025-09-30 20:06:39.588564298 +0000 UTC m=+1201.527023116" observedRunningTime="2025-09-30 20:06:40.941289751 +0000 UTC m=+1202.879748569" watchObservedRunningTime="2025-09-30 20:06:40.948869562 +0000 UTC m=+1202.887328380" Sep 30 20:06:41 crc kubenswrapper[4603]: I0930 20:06:41.930939 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerStarted","Data":"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13"} Sep 30 20:06:42 crc kubenswrapper[4603]: I0930 20:06:42.939973 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerStarted","Data":"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7"} Sep 30 20:06:42 crc kubenswrapper[4603]: I0930 20:06:42.970491 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.970476367 podStartE2EDuration="3.970476367s" podCreationTimestamp="2025-09-30 20:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:06:42.968035429 +0000 UTC m=+1204.906494247" watchObservedRunningTime="2025-09-30 20:06:42.970476367 +0000 UTC m=+1204.908935175" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.246573 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.247208 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.249327 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.271264 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.271556 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:06:46 crc kubenswrapper[4603]: I0930 20:06:46.273451 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.311819 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.312545 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.342756 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.355641 4603 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.987272 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:06:47 crc kubenswrapper[4603]: I0930 20:06:47.987591 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:06:49 crc kubenswrapper[4603]: I0930 20:06:49.917125 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:49 crc kubenswrapper[4603]: I0930 20:06:49.917430 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:49 crc kubenswrapper[4603]: I0930 20:06:49.959848 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:49 crc kubenswrapper[4603]: I0930 20:06:49.975315 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:50 crc kubenswrapper[4603]: I0930 20:06:50.011094 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:50 crc kubenswrapper[4603]: I0930 20:06:50.011141 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:50 crc kubenswrapper[4603]: E0930 20:06:50.299257 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"ceilometer-notification-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.020216 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerStarted","Data":"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675"} Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.020279 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="sg-core" containerID="cri-o://91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" gracePeriod=30 Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.020397 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="proxy-httpd" containerID="cri-o://026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" gracePeriod=30 Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.020619 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.488792 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541195 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541317 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541383 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541403 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmfrl\" (UniqueName: \"kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541446 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541543 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541567 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data\") pod \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\" (UID: \"29b4231f-d9d3-4ac4-ba39-f6d9ea962724\") " Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541725 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.541820 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.542050 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.542070 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.561825 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts" (OuterVolumeSpecName: "scripts") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.578389 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl" (OuterVolumeSpecName: "kube-api-access-lmfrl") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "kube-api-access-lmfrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.585844 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.604613 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.610957 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data" (OuterVolumeSpecName: "config-data") pod "29b4231f-d9d3-4ac4-ba39-f6d9ea962724" (UID: "29b4231f-d9d3-4ac4-ba39-f6d9ea962724"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.643312 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.643351 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmfrl\" (UniqueName: \"kubernetes.io/projected/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-kube-api-access-lmfrl\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.643362 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.643372 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:51 crc kubenswrapper[4603]: I0930 20:06:51.643381 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29b4231f-d9d3-4ac4-ba39-f6d9ea962724-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.032349 4603 generic.go:334] "Generic (PLEG): container finished" podID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerID="026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" exitCode=0 Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.033268 4603 generic.go:334] "Generic (PLEG): container finished" podID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerID="91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" exitCode=2 Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.032422 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.032426 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerDied","Data":"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675"} Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.035109 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerDied","Data":"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858"} Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.035184 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29b4231f-d9d3-4ac4-ba39-f6d9ea962724","Type":"ContainerDied","Data":"987d3d250649eef71f6e42c98bb618a052b08e5140634f696ddfc5fc0a1e8d7e"} Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.035201 4603 scope.go:117] "RemoveContainer" containerID="026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.074009 4603 scope.go:117] "RemoveContainer" containerID="91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.127224 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.155555 4603 scope.go:117] "RemoveContainer" containerID="026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" Sep 30 20:06:52 crc kubenswrapper[4603]: E0930 20:06:52.156018 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675\": container with ID starting with 026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675 not found: ID does not exist" containerID="026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.156119 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675"} err="failed to get container status \"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675\": rpc error: code = NotFound desc = could not find container \"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675\": container with ID starting with 026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675 not found: ID does not exist" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.156209 4603 scope.go:117] "RemoveContainer" containerID="91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" Sep 30 20:06:52 crc kubenswrapper[4603]: E0930 20:06:52.157412 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858\": container with ID starting with 91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858 not found: ID does not exist" containerID="91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.157450 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858"} err="failed to get container status \"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858\": rpc error: code = NotFound desc = could not find container \"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858\": container with ID starting with 91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858 not found: ID does not exist" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.157479 4603 scope.go:117] "RemoveContainer" containerID="026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.161501 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675"} err="failed to get container status \"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675\": rpc error: code = NotFound desc = could not find container \"026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675\": container with ID starting with 026ece224c94280e0512e744d3f0f890e1b9430bc9eaa4ec0b9e8c5324f0f675 not found: ID does not exist" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.161540 4603 scope.go:117] "RemoveContainer" containerID="91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.176030 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858"} err="failed to get container status \"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858\": rpc error: code = NotFound desc = could not find container \"91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858\": container with ID starting with 91a16e24c4b98a33a5a68923005dd0c525ab496f620ea0a7945fb158b29d3858 not found: ID does not exist" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.181250 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.195750 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:06:52 crc kubenswrapper[4603]: E0930 20:06:52.196265 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="proxy-httpd" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.202721 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="proxy-httpd" Sep 30 20:06:52 crc kubenswrapper[4603]: E0930 20:06:52.202836 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="sg-core" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.202887 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="sg-core" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.203237 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="sg-core" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.203311 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" containerName="proxy-httpd" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.205294 4603 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.210755 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.211136 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.211573 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254126 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w64fs\" (UniqueName: \"kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254254 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254321 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254353 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254412 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254434 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.254456 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.356364 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc 
kubenswrapper[4603]: I0930 20:06:52.356961 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.357043 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.357142 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.357257 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w64fs\" (UniqueName: \"kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.357385 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.357492 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.356897 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.358707 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.364710 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.364967 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.368669 4603 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.369771 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.375956 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w64fs\" (UniqueName: \"kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs\") pod \"ceilometer-0\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.532777 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.533428 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.537257 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.538780 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.553840 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.554150 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.558373 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:06:52 crc kubenswrapper[4603]: I0930 20:06:52.779909 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b4231f-d9d3-4ac4-ba39-f6d9ea962724" path="/var/lib/kubelet/pods/29b4231f-d9d3-4ac4-ba39-f6d9ea962724/volumes" Sep 30 20:06:53 crc kubenswrapper[4603]: I0930 20:06:53.091058 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:06:53 crc kubenswrapper[4603]: W0930 20:06:53.103997 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5cb044d_61cd_425b_9fba_42f9427172d1.slice/crio-cc78d12894f649cd0e730f8b9fb5ea8e5d5566bc29d4c5b7f2a9bb9764abc77c WatchSource:0}: Error finding container cc78d12894f649cd0e730f8b9fb5ea8e5d5566bc29d4c5b7f2a9bb9764abc77c: Status 404 returned error can't find the container with id cc78d12894f649cd0e730f8b9fb5ea8e5d5566bc29d4c5b7f2a9bb9764abc77c Sep 30 20:06:54 crc kubenswrapper[4603]: I0930 20:06:54.055045 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerStarted","Data":"e008aff623142fdc53980328ef3b7dda52eb27d356e64b551522a5a13749204f"} Sep 30 20:06:54 crc kubenswrapper[4603]: I0930 20:06:54.055438 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerStarted","Data":"cc78d12894f649cd0e730f8b9fb5ea8e5d5566bc29d4c5b7f2a9bb9764abc77c"} Sep 30 20:06:55 crc kubenswrapper[4603]: I0930 20:06:55.065890 4603 generic.go:334] "Generic (PLEG): container finished" podID="49895f67-376c-4c08-9382-18aee2212e04" containerID="84c7df746db3444cf2d62ebc0a0db4f41e236efaa9b46d776eb254476f881a9f" exitCode=0 Sep 30 20:06:55 crc kubenswrapper[4603]: I0930 20:06:55.066234 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t94b6" event={"ID":"49895f67-376c-4c08-9382-18aee2212e04","Type":"ContainerDied","Data":"84c7df746db3444cf2d62ebc0a0db4f41e236efaa9b46d776eb254476f881a9f"} Sep 30 20:06:55 crc kubenswrapper[4603]: I0930 20:06:55.069078 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerStarted","Data":"31d8433fa3dbb7b97bcb05e530912c8db1d862eba5039fb37aac3bc4304e8e5d"} Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.080335 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerStarted","Data":"996edcde2140c54d7f7c150132492fb51e98aea0c9d980747200119274a854fe"} Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.246378 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.272124 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.413765 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-t94b6" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.533024 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2lbc\" (UniqueName: \"kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc\") pod \"49895f67-376c-4c08-9382-18aee2212e04\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.533215 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data\") pod \"49895f67-376c-4c08-9382-18aee2212e04\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.533335 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle\") pod \"49895f67-376c-4c08-9382-18aee2212e04\" (UID: \"49895f67-376c-4c08-9382-18aee2212e04\") " Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.542382 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "49895f67-376c-4c08-9382-18aee2212e04" (UID: "49895f67-376c-4c08-9382-18aee2212e04"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.567532 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc" (OuterVolumeSpecName: "kube-api-access-n2lbc") pod "49895f67-376c-4c08-9382-18aee2212e04" (UID: "49895f67-376c-4c08-9382-18aee2212e04"). InnerVolumeSpecName "kube-api-access-n2lbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.578703 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49895f67-376c-4c08-9382-18aee2212e04" (UID: "49895f67-376c-4c08-9382-18aee2212e04"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.635832 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.635876 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2lbc\" (UniqueName: \"kubernetes.io/projected/49895f67-376c-4c08-9382-18aee2212e04-kube-api-access-n2lbc\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:56 crc kubenswrapper[4603]: I0930 20:06:56.635892 4603 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/49895f67-376c-4c08-9382-18aee2212e04-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.089825 4603 generic.go:334] "Generic (PLEG): container finished" podID="7a87079f-7cb1-447c-a950-bb204031afce" containerID="785a9a7fe6aa5b441c87196db57d1d4042df017621d17b20a4b96b1b0cb8362f" exitCode=0 Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.090029 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cgrnb" event={"ID":"7a87079f-7cb1-447c-a950-bb204031afce","Type":"ContainerDied","Data":"785a9a7fe6aa5b441c87196db57d1d4042df017621d17b20a4b96b1b0cb8362f"} Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.093046 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t94b6" event={"ID":"49895f67-376c-4c08-9382-18aee2212e04","Type":"ContainerDied","Data":"912773e710556bfb6a4dc40a2f23ca0047473754f8cd2105709655f98804eba7"} Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.093081 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="912773e710556bfb6a4dc40a2f23ca0047473754f8cd2105709655f98804eba7" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.093137 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-t94b6" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.096590 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerStarted","Data":"89eae234742c6e2e93439ed477c47817376c7e75f9385aacf98cbd597b1e47ed"} Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.097645 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.156939 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.640347819 podStartE2EDuration="5.156919362s" podCreationTimestamp="2025-09-30 20:06:52 +0000 UTC" firstStartedPulling="2025-09-30 20:06:53.105486706 +0000 UTC m=+1215.043945524" lastFinishedPulling="2025-09-30 20:06:56.622058249 +0000 UTC m=+1218.560517067" observedRunningTime="2025-09-30 20:06:57.146555665 +0000 UTC m=+1219.085014493" watchObservedRunningTime="2025-09-30 20:06:57.156919362 +0000 UTC m=+1219.095378180" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.429693 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-794f5b84fd-6qbxk"] Sep 30 20:06:57 crc kubenswrapper[4603]: E0930 20:06:57.430032 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49895f67-376c-4c08-9382-18aee2212e04" containerName="barbican-db-sync" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.430047 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="49895f67-376c-4c08-9382-18aee2212e04" containerName="barbican-db-sync" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.435748 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="49895f67-376c-4c08-9382-18aee2212e04" containerName="barbican-db-sync" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.437197 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.444155 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ktchh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.444436 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.444551 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.471676 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-794f5b84fd-6qbxk"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.477797 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-694445bff9-srxdg"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.479221 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.483448 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.489560 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-694445bff9-srxdg"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.621816 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da706fb6-9ab9-4c32-bd34-2b9afe444c20-logs\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.621874 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr5gz\" (UniqueName: \"kubernetes.io/projected/da706fb6-9ab9-4c32-bd34-2b9afe444c20-kube-api-access-gr5gz\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622079 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-combined-ca-bundle\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622105 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data-custom\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622121 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data-custom\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622138 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622185 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh89f\" (UniqueName: \"kubernetes.io/projected/47e3b799-3f78-46c1-916e-cca00da66c8c-kube-api-access-dh89f\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622262 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47e3b799-3f78-46c1-916e-cca00da66c8c-logs\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622281 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.622308 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-combined-ca-bundle\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.641208 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.642618 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.678136 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723590 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da706fb6-9ab9-4c32-bd34-2b9afe444c20-logs\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723648 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr5gz\" (UniqueName: \"kubernetes.io/projected/da706fb6-9ab9-4c32-bd34-2b9afe444c20-kube-api-access-gr5gz\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723671 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-combined-ca-bundle\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723692 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723715 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data-custom\") pod 
\"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723732 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data-custom\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723752 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723778 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh89f\" (UniqueName: \"kubernetes.io/projected/47e3b799-3f78-46c1-916e-cca00da66c8c-kube-api-access-dh89f\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723801 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7778r\" (UniqueName: \"kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723818 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723849 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723879 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723897 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723914 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47e3b799-3f78-46c1-916e-cca00da66c8c-logs\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723931 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.723960 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-combined-ca-bundle\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.725331 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da706fb6-9ab9-4c32-bd34-2b9afe444c20-logs\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.725576 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47e3b799-3f78-46c1-916e-cca00da66c8c-logs\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.742745 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.744599 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.747900 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.748529 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.758287 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data-custom\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.758646 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-combined-ca-bundle\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.759215 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.760051 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh89f\" (UniqueName: \"kubernetes.io/projected/47e3b799-3f78-46c1-916e-cca00da66c8c-kube-api-access-dh89f\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.760099 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da706fb6-9ab9-4c32-bd34-2b9afe444c20-config-data-custom\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.765222 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr5gz\" (UniqueName: \"kubernetes.io/projected/da706fb6-9ab9-4c32-bd34-2b9afe444c20-kube-api-access-gr5gz\") pod \"barbican-worker-694445bff9-srxdg\" (UID: \"da706fb6-9ab9-4c32-bd34-2b9afe444c20\") " pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.770011 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-combined-ca-bundle\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: \"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.783528 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47e3b799-3f78-46c1-916e-cca00da66c8c-config-data\") pod \"barbican-keystone-listener-794f5b84fd-6qbxk\" (UID: 
\"47e3b799-3f78-46c1-916e-cca00da66c8c\") " pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.830993 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834021 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2kph\" (UniqueName: \"kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834120 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7778r\" (UniqueName: \"kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834186 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834220 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834277 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834401 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834562 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.834723 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.835464 4603 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.835564 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.836264 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.840099 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.841153 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.841409 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.841762 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.842420 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.862416 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7778r\" (UniqueName: \"kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r\") pod \"dnsmasq-dns-6d66f584d7-n7qbg\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.923082 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-694445bff9-srxdg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.941840 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.942010 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2kph\" (UniqueName: \"kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.942061 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.942911 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.942971 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.943808 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.952195 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.953821 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.955576 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:57 
crc kubenswrapper[4603]: I0930 20:06:57.961742 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:06:57 crc kubenswrapper[4603]: I0930 20:06:57.980206 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2kph\" (UniqueName: \"kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph\") pod \"barbican-api-647b495d86-qvfmh\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.249479 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.434485 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-794f5b84fd-6qbxk"] Sep 30 20:06:58 crc kubenswrapper[4603]: W0930 20:06:58.442333 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47e3b799_3f78_46c1_916e_cca00da66c8c.slice/crio-629eb32a8c1c720760d21a17beb221ce8912b4a42415f408dd9228f25a479873 WatchSource:0}: Error finding container 629eb32a8c1c720760d21a17beb221ce8912b4a42415f408dd9228f25a479873: Status 404 returned error can't find the container with id 629eb32a8c1c720760d21a17beb221ce8912b4a42415f408dd9228f25a479873 Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.564157 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-694445bff9-srxdg"] Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.627560 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.669802 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.669913 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.669992 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.670039 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5nmq\" (UniqueName: \"kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.670070 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: 
\"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.670132 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data\") pod \"7a87079f-7cb1-447c-a950-bb204031afce\" (UID: \"7a87079f-7cb1-447c-a950-bb204031afce\") " Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.674773 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.679529 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts" (OuterVolumeSpecName: "scripts") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.681392 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq" (OuterVolumeSpecName: "kube-api-access-k5nmq") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "kube-api-access-k5nmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.684040 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.703862 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.757276 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.759286 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data" (OuterVolumeSpecName: "config-data") pod "7a87079f-7cb1-447c-a950-bb204031afce" (UID: "7a87079f-7cb1-447c-a950-bb204031afce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776180 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776203 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5nmq\" (UniqueName: \"kubernetes.io/projected/7a87079f-7cb1-447c-a950-bb204031afce-kube-api-access-k5nmq\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776212 4603 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7a87079f-7cb1-447c-a950-bb204031afce-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776220 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776228 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.776235 4603 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7a87079f-7cb1-447c-a950-bb204031afce-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:06:58 crc kubenswrapper[4603]: I0930 20:06:58.935491 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.243671 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-cgrnb" event={"ID":"7a87079f-7cb1-447c-a950-bb204031afce","Type":"ContainerDied","Data":"71f33a214dab7ddbb7391efe839906d8d4c7c572e72802f2a35b2f11034608c4"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.243894 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71f33a214dab7ddbb7391efe839906d8d4c7c572e72802f2a35b2f11034608c4" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.243988 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-cgrnb" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.276258 4603 generic.go:334] "Generic (PLEG): container finished" podID="92c1011c-57db-40ba-af2e-f07f48558671" containerID="3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e" exitCode=0 Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.276358 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" event={"ID":"92c1011c-57db-40ba-af2e-f07f48558671","Type":"ContainerDied","Data":"3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.276385 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" event={"ID":"92c1011c-57db-40ba-af2e-f07f48558671","Type":"ContainerStarted","Data":"068c343df97892d84694a450707e14dd5d2a1d27a9e0b1976a96a1e393d09f84"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.287434 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:06:59 crc kubenswrapper[4603]: E0930 20:06:59.287773 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a87079f-7cb1-447c-a950-bb204031afce" containerName="cinder-db-sync" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.287786 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a87079f-7cb1-447c-a950-bb204031afce" containerName="cinder-db-sync" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.287944 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a87079f-7cb1-447c-a950-bb204031afce" containerName="cinder-db-sync" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.289190 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.292815 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.292953 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9nwz8" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.293081 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.293182 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.302751 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.320580 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerStarted","Data":"9ae9a2195a877ac781874aaaa20410bae6961a4d3cfab0c1a6effebda84eed1e"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.326561 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-694445bff9-srxdg" event={"ID":"da706fb6-9ab9-4c32-bd34-2b9afe444c20","Type":"ContainerStarted","Data":"c0db9bc02cd20bceb619cfba8f216a0f2afb980b7bba917a1381f96850257c16"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.397960 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" event={"ID":"47e3b799-3f78-46c1-916e-cca00da66c8c","Type":"ContainerStarted","Data":"629eb32a8c1c720760d21a17beb221ce8912b4a42415f408dd9228f25a479873"} Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.415189 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.452056 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.468836 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.468945 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.498892 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.499239 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92nlz\" (UniqueName: \"kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.499353 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.501035 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.501248 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.501484 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603120 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603198 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603219 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 
20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603237 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w79mh\" (UniqueName: \"kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603256 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603332 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603371 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603414 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603443 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92nlz\" (UniqueName: \"kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603476 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603497 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.603521 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 
20:06:59.610770 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.611841 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.617554 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.617619 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.619556 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.638704 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.647041 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.647139 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.652427 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.694900 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92nlz\" (UniqueName: \"kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz\") pod \"cinder-scheduler-0\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " pod="openstack/cinder-scheduler-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.707823 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.707873 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.707891 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.707911 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w79mh\" (UniqueName: \"kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.707931 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708156 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708185 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hnkg\" (UniqueName: \"kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708214 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts\") pod 
\"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708235 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708265 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708287 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708310 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708327 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.708872 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.709091 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.709639 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.710566 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: 
I0930 20:06:59.715564 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.728082 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w79mh\" (UniqueName: \"kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh\") pod \"dnsmasq-dns-674b76c99f-chg9c\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811338 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811374 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hnkg\" (UniqueName: \"kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811408 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811425 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811454 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811520 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811561 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.811690 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 
20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.812345 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.820997 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.825529 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.826080 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.826744 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.831131 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.835712 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hnkg\" (UniqueName: \"kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg\") pod \"cinder-api-0\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " pod="openstack/cinder-api-0" Sep 30 20:06:59 crc kubenswrapper[4603]: I0930 20:06:59.960606 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.021413 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.463877 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" event={"ID":"92c1011c-57db-40ba-af2e-f07f48558671","Type":"ContainerStarted","Data":"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e"} Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.464562 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="dnsmasq-dns" containerID="cri-o://70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e" gracePeriod=10 Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.464900 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.488525 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerStarted","Data":"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133"} Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.488591 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerStarted","Data":"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4"} Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.489799 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.489853 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.505043 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" podStartSLOduration=3.505027734 podStartE2EDuration="3.505027734s" podCreationTimestamp="2025-09-30 20:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:00.504501459 +0000 UTC m=+1222.442960277" watchObservedRunningTime="2025-09-30 20:07:00.505027734 +0000 UTC m=+1222.443486552" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.544509 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-647b495d86-qvfmh" podStartSLOduration=3.544494458 podStartE2EDuration="3.544494458s" podCreationTimestamp="2025-09-30 20:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:00.537632068 +0000 UTC m=+1222.476090886" watchObservedRunningTime="2025-09-30 20:07:00.544494458 +0000 UTC m=+1222.482953276" Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.545253 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.750253 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:00 crc kubenswrapper[4603]: W0930 20:07:00.803091 4603 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8dd56eaa_bb04_4612_acfc_ecf68345ac6c.slice/crio-419aa1e715032013f676394a0ba327643cdfb76a3cd38dae017f57afd6d4a83c WatchSource:0}: Error finding container 419aa1e715032013f676394a0ba327643cdfb76a3cd38dae017f57afd6d4a83c: Status 404 returned error can't find the container with id 419aa1e715032013f676394a0ba327643cdfb76a3cd38dae017f57afd6d4a83c Sep 30 20:07:00 crc kubenswrapper[4603]: I0930 20:07:00.908967 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.186621 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.259340 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.259612 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.259664 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.259841 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.260151 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.260358 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7778r\" (UniqueName: \"kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r\") pod \"92c1011c-57db-40ba-af2e-f07f48558671\" (UID: \"92c1011c-57db-40ba-af2e-f07f48558671\") " Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.267625 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r" (OuterVolumeSpecName: "kube-api-access-7778r") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "kube-api-access-7778r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.343612 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.358666 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config" (OuterVolumeSpecName: "config") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.362072 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7778r\" (UniqueName: \"kubernetes.io/projected/92c1011c-57db-40ba-af2e-f07f48558671-kube-api-access-7778r\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.362097 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.362106 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.370414 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.377104 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.387260 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "92c1011c-57db-40ba-af2e-f07f48558671" (UID: "92c1011c-57db-40ba-af2e-f07f48558671"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.465951 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.465990 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.466002 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92c1011c-57db-40ba-af2e-f07f48558671-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.519521 4603 generic.go:334] "Generic (PLEG): container finished" podID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerID="d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c" exitCode=0 Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.520188 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" event={"ID":"c2ba9f96-59c1-438e-9b89-228c9dcd2409","Type":"ContainerDied","Data":"d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.520336 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" event={"ID":"c2ba9f96-59c1-438e-9b89-228c9dcd2409","Type":"ContainerStarted","Data":"e1de3abfa3306a90c72d5bd83ff03c756ca811c335e6cbfac7af2194dd11575a"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.525862 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerStarted","Data":"419aa1e715032013f676394a0ba327643cdfb76a3cd38dae017f57afd6d4a83c"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.529844 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerStarted","Data":"279365ac2394cbb5c13482e6ee72a33811be36506a60e0fb5f40c55cdec82af7"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.540409 4603 generic.go:334] "Generic (PLEG): container finished" podID="92c1011c-57db-40ba-af2e-f07f48558671" containerID="70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e" exitCode=0 Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.540804 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.541190 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" event={"ID":"92c1011c-57db-40ba-af2e-f07f48558671","Type":"ContainerDied","Data":"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.541362 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-n7qbg" event={"ID":"92c1011c-57db-40ba-af2e-f07f48558671","Type":"ContainerDied","Data":"068c343df97892d84694a450707e14dd5d2a1d27a9e0b1976a96a1e393d09f84"} Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.541471 4603 scope.go:117] "RemoveContainer" containerID="70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e" Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.625349 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:07:01 crc kubenswrapper[4603]: I0930 20:07:01.630771 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-n7qbg"] Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.241896 4603 scope.go:117] "RemoveContainer" containerID="3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e" Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.560874 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerStarted","Data":"773fd757172ed85bd7fb8ac1c212797702ae5c05a8ed88df783a9876c404fc50"} Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.720876 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.776667 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92c1011c-57db-40ba-af2e-f07f48558671" path="/var/lib/kubelet/pods/92c1011c-57db-40ba-af2e-f07f48558671/volumes" Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.938939 4603 scope.go:117] "RemoveContainer" containerID="70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e" Sep 30 20:07:02 crc kubenswrapper[4603]: E0930 20:07:02.939628 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e\": container with ID starting with 70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e not found: ID does not exist" containerID="70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e" Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.939697 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e"} err="failed to get container status \"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e\": rpc error: code = NotFound desc = could not find container \"70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e\": container with ID starting with 70489339271ebb71b0045983a705effa79f7dd99c6570f2afcfffdb5731f274e not found: ID does not exist" Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.939741 4603 scope.go:117] "RemoveContainer" containerID="3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e" Sep 30 20:07:02 crc kubenswrapper[4603]: E0930 
20:07:02.940314 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e\": container with ID starting with 3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e not found: ID does not exist" containerID="3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e" Sep 30 20:07:02 crc kubenswrapper[4603]: I0930 20:07:02.940355 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e"} err="failed to get container status \"3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e\": rpc error: code = NotFound desc = could not find container \"3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e\": container with ID starting with 3b6d2ecd92d2c7a0adcbb569478af3b53bf0ba836e07b0462e6acf63e5fab61e not found: ID does not exist" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.599073 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" event={"ID":"47e3b799-3f78-46c1-916e-cca00da66c8c","Type":"ContainerStarted","Data":"8de9a0506de6d9894d3421f8a45974fd37fb3c94e9eaec3d1e42d0a72eb78457"} Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.603924 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" event={"ID":"c2ba9f96-59c1-438e-9b89-228c9dcd2409","Type":"ContainerStarted","Data":"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2"} Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.605440 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.630548 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" podStartSLOduration=4.630528542 podStartE2EDuration="4.630528542s" podCreationTimestamp="2025-09-30 20:06:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:03.622673564 +0000 UTC m=+1225.561132382" watchObservedRunningTime="2025-09-30 20:07:03.630528542 +0000 UTC m=+1225.568987360" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.634485 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-694445bff9-srxdg" event={"ID":"da706fb6-9ab9-4c32-bd34-2b9afe444c20","Type":"ContainerStarted","Data":"583ee0378277b878cc9f5bde9ef44f25e340bc35246b3becea72d24873b4d73d"} Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.978305 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57b5d479d8-xlxfm"] Sep 30 20:07:03 crc kubenswrapper[4603]: E0930 20:07:03.978951 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="init" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.978969 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="init" Sep 30 20:07:03 crc kubenswrapper[4603]: E0930 20:07:03.978995 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="dnsmasq-dns" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.979001 4603 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="dnsmasq-dns" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.979271 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c1011c-57db-40ba-af2e-f07f48558671" containerName="dnsmasq-dns" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.980273 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.986666 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 20:07:03 crc kubenswrapper[4603]: I0930 20:07:03.986867 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:03.994178 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57b5d479d8-xlxfm"] Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.140640 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data-custom\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.140895 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dctk6\" (UniqueName: \"kubernetes.io/projected/850ec93c-cfa1-4bb4-905b-1b8296985c50-kube-api-access-dctk6\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.140945 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-internal-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.140977 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-combined-ca-bundle\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.140995 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/850ec93c-cfa1-4bb4-905b-1b8296985c50-logs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.141021 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-public-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.141066 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.242248 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-internal-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.242322 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-combined-ca-bundle\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.242358 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/850ec93c-cfa1-4bb4-905b-1b8296985c50-logs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.242895 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-public-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.242958 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.243008 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data-custom\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.243030 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dctk6\" (UniqueName: \"kubernetes.io/projected/850ec93c-cfa1-4bb4-905b-1b8296985c50-kube-api-access-dctk6\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.243198 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/850ec93c-cfa1-4bb4-905b-1b8296985c50-logs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.248656 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-internal-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.250075 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-combined-ca-bundle\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.253078 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data-custom\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.257349 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-config-data\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.265823 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/850ec93c-cfa1-4bb4-905b-1b8296985c50-public-tls-certs\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.272776 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dctk6\" (UniqueName: \"kubernetes.io/projected/850ec93c-cfa1-4bb4-905b-1b8296985c50-kube-api-access-dctk6\") pod \"barbican-api-57b5d479d8-xlxfm\" (UID: \"850ec93c-cfa1-4bb4-905b-1b8296985c50\") " pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.494118 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.653757 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerStarted","Data":"3b9cabac9fd9abf4409391c5c61b0d72ada49954fc1145f58dfee2da6fd4a3da"} Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.666722 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-694445bff9-srxdg" event={"ID":"da706fb6-9ab9-4c32-bd34-2b9afe444c20","Type":"ContainerStarted","Data":"4127957472e2ebbcb1f86a845ec7c32b165be84a47f9312cd007cf74583315cd"} Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.673637 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" event={"ID":"47e3b799-3f78-46c1-916e-cca00da66c8c","Type":"ContainerStarted","Data":"48e0e1caf34e0382f8950873feca41f7ccffd14b53dfb7840a72e2f198a27b2c"} Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.682838 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerStarted","Data":"72e1db42f7e375de95d9981932530ca93788d738fe9d370039f612e90ff67965"} Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.692274 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-694445bff9-srxdg" podStartSLOduration=3.217923462 podStartE2EDuration="7.692255046s" podCreationTimestamp="2025-09-30 20:06:57 +0000 UTC" firstStartedPulling="2025-09-30 20:06:58.554997935 +0000 UTC m=+1220.493456743" lastFinishedPulling="2025-09-30 20:07:03.029329509 +0000 UTC m=+1224.967788327" observedRunningTime="2025-09-30 20:07:04.685697344 +0000 UTC m=+1226.624156162" watchObservedRunningTime="2025-09-30 20:07:04.692255046 +0000 UTC m=+1226.630713854" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.698534 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api-log" containerID="cri-o://773fd757172ed85bd7fb8ac1c212797702ae5c05a8ed88df783a9876c404fc50" gracePeriod=30 Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.698720 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.698772 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api" containerID="cri-o://72e1db42f7e375de95d9981932530ca93788d738fe9d370039f612e90ff67965" gracePeriod=30 Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.736638 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-794f5b84fd-6qbxk" podStartSLOduration=3.155302296 podStartE2EDuration="7.736619627s" podCreationTimestamp="2025-09-30 20:06:57 +0000 UTC" firstStartedPulling="2025-09-30 20:06:58.447641478 +0000 UTC m=+1220.386100296" lastFinishedPulling="2025-09-30 20:07:03.028958809 +0000 UTC m=+1224.967417627" observedRunningTime="2025-09-30 20:07:04.713150595 +0000 UTC m=+1226.651609413" watchObservedRunningTime="2025-09-30 20:07:04.736619627 +0000 UTC m=+1226.675078445" Sep 30 20:07:04 crc kubenswrapper[4603]: I0930 20:07:04.810777 4603 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.810758942 podStartE2EDuration="5.810758942s" podCreationTimestamp="2025-09-30 20:06:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:04.750882472 +0000 UTC m=+1226.689341290" watchObservedRunningTime="2025-09-30 20:07:04.810758942 +0000 UTC m=+1226.749217760" Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.252483 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57b5d479d8-xlxfm"] Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.731728 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57b5d479d8-xlxfm" event={"ID":"850ec93c-cfa1-4bb4-905b-1b8296985c50","Type":"ContainerStarted","Data":"b62739f764f364bcab5c54b32d2d0b5c6e7e57af45dd75155e8400f2338254f5"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.731985 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57b5d479d8-xlxfm" event={"ID":"850ec93c-cfa1-4bb4-905b-1b8296985c50","Type":"ContainerStarted","Data":"6c87b814bb9a9bdf7263b8cac6e3b0548a079c3f7151c9842172ad7cc05f4e80"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.763067 4603 generic.go:334] "Generic (PLEG): container finished" podID="a86c5953-cb61-4f11-b581-eb7698adf7ec" containerID="202e659da32a9e682700e9f14341f18f494de34a1c84c1b04b4984e931ad9ca9" exitCode=0 Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.763183 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4265b" event={"ID":"a86c5953-cb61-4f11-b581-eb7698adf7ec","Type":"ContainerDied","Data":"202e659da32a9e682700e9f14341f18f494de34a1c84c1b04b4984e931ad9ca9"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.796538 4603 generic.go:334] "Generic (PLEG): container finished" podID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerID="72e1db42f7e375de95d9981932530ca93788d738fe9d370039f612e90ff67965" exitCode=0 Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.796577 4603 generic.go:334] "Generic (PLEG): container finished" podID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerID="773fd757172ed85bd7fb8ac1c212797702ae5c05a8ed88df783a9876c404fc50" exitCode=143 Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.796619 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerDied","Data":"72e1db42f7e375de95d9981932530ca93788d738fe9d370039f612e90ff67965"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.796646 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerDied","Data":"773fd757172ed85bd7fb8ac1c212797702ae5c05a8ed88df783a9876c404fc50"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.828763 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerStarted","Data":"2bc261f2aa1f2441acd0b701ec42e97b41101633db011782251dc5da1abf7e13"} Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.869641 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.716653351 podStartE2EDuration="6.869626298s" podCreationTimestamp="2025-09-30 20:06:59 +0000 UTC" firstStartedPulling="2025-09-30 
20:07:00.879979372 +0000 UTC m=+1222.818438180" lastFinishedPulling="2025-09-30 20:07:03.032952309 +0000 UTC m=+1224.971411127" observedRunningTime="2025-09-30 20:07:05.869459963 +0000 UTC m=+1227.807918781" watchObservedRunningTime="2025-09-30 20:07:05.869626298 +0000 UTC m=+1227.808085116" Sep 30 20:07:05 crc kubenswrapper[4603]: I0930 20:07:05.904759 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021703 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021779 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021801 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021844 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021888 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.021927 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hnkg\" (UniqueName: \"kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.022043 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs\") pod \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\" (UID: \"8dd56eaa-bb04-4612-acfc-ecf68345ac6c\") " Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.023427 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.025140 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs" (OuterVolumeSpecName: "logs") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.042658 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.055903 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg" (OuterVolumeSpecName: "kube-api-access-6hnkg") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "kube-api-access-6hnkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.056286 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts" (OuterVolumeSpecName: "scripts") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.078594 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.124990 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data" (OuterVolumeSpecName: "config-data") pod "8dd56eaa-bb04-4612-acfc-ecf68345ac6c" (UID: "8dd56eaa-bb04-4612-acfc-ecf68345ac6c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125344 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125371 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hnkg\" (UniqueName: \"kubernetes.io/projected/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-kube-api-access-6hnkg\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125385 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125393 4603 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125403 4603 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125414 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.125425 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd56eaa-bb04-4612-acfc-ecf68345ac6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.837633 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8dd56eaa-bb04-4612-acfc-ecf68345ac6c","Type":"ContainerDied","Data":"419aa1e715032013f676394a0ba327643cdfb76a3cd38dae017f57afd6d4a83c"} Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.837869 4603 scope.go:117] "RemoveContainer" containerID="72e1db42f7e375de95d9981932530ca93788d738fe9d370039f612e90ff67965" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.837888 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.840956 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57b5d479d8-xlxfm" event={"ID":"850ec93c-cfa1-4bb4-905b-1b8296985c50","Type":"ContainerStarted","Data":"0ce78fd4233fc94c799f63cdb829cc106883830bf42fa9e464fa0e761ef77b54"} Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.840997 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.841103 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.879813 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-57b5d479d8-xlxfm" podStartSLOduration=3.879793222 podStartE2EDuration="3.879793222s" podCreationTimestamp="2025-09-30 20:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:06.865291149 +0000 UTC m=+1228.803749987" watchObservedRunningTime="2025-09-30 20:07:06.879793222 +0000 UTC m=+1228.818252040" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.912232 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.927224 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.954482 4603 scope.go:117] "RemoveContainer" containerID="773fd757172ed85bd7fb8ac1c212797702ae5c05a8ed88df783a9876c404fc50" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.958643 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:06 crc kubenswrapper[4603]: E0930 20:07:06.958993 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.959005 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api" Sep 30 20:07:06 crc kubenswrapper[4603]: E0930 20:07:06.959018 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api-log" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.959025 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api-log" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.977336 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api-log" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.977390 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" containerName="cinder-api" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.978390 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.978477 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.984701 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.984898 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 20:07:06 crc kubenswrapper[4603]: I0930 20:07:06.985001 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059153 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059225 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059247 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnxc7\" (UniqueName: \"kubernetes.io/projected/f8849bc2-be9b-4897-9501-36c14d4e51f2-kube-api-access-bnxc7\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059291 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data-custom\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059323 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8849bc2-be9b-4897-9501-36c14d4e51f2-logs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059343 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8849bc2-be9b-4897-9501-36c14d4e51f2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059371 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059397 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " 
pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.059456 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-scripts\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.162377 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.162791 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-scripts\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.162860 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.162929 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.162959 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnxc7\" (UniqueName: \"kubernetes.io/projected/f8849bc2-be9b-4897-9501-36c14d4e51f2-kube-api-access-bnxc7\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.163020 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data-custom\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.163064 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8849bc2-be9b-4897-9501-36c14d4e51f2-logs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.163095 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8849bc2-be9b-4897-9501-36c14d4e51f2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.163138 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") 
" pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.168308 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8849bc2-be9b-4897-9501-36c14d4e51f2-logs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.168382 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f8849bc2-be9b-4897-9501-36c14d4e51f2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.169420 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.187878 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.188328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-scripts\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.189276 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.210206 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnxc7\" (UniqueName: \"kubernetes.io/projected/f8849bc2-be9b-4897-9501-36c14d4e51f2-kube-api-access-bnxc7\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.210839 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-config-data-custom\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.211400 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8849bc2-be9b-4897-9501-36c14d4e51f2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f8849bc2-be9b-4897-9501-36c14d4e51f2\") " pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.323423 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.459784 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4265b" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.569258 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config\") pod \"a86c5953-cb61-4f11-b581-eb7698adf7ec\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.570413 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle\") pod \"a86c5953-cb61-4f11-b581-eb7698adf7ec\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.570490 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh6p5\" (UniqueName: \"kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5\") pod \"a86c5953-cb61-4f11-b581-eb7698adf7ec\" (UID: \"a86c5953-cb61-4f11-b581-eb7698adf7ec\") " Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.580181 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5" (OuterVolumeSpecName: "kube-api-access-hh6p5") pod "a86c5953-cb61-4f11-b581-eb7698adf7ec" (UID: "a86c5953-cb61-4f11-b581-eb7698adf7ec"). InnerVolumeSpecName "kube-api-access-hh6p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.608758 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a86c5953-cb61-4f11-b581-eb7698adf7ec" (UID: "a86c5953-cb61-4f11-b581-eb7698adf7ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.619564 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config" (OuterVolumeSpecName: "config") pod "a86c5953-cb61-4f11-b581-eb7698adf7ec" (UID: "a86c5953-cb61-4f11-b581-eb7698adf7ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.672811 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.672855 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a86c5953-cb61-4f11-b581-eb7698adf7ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.672866 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh6p5\" (UniqueName: \"kubernetes.io/projected/a86c5953-cb61-4f11-b581-eb7698adf7ec-kube-api-access-hh6p5\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.859368 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4265b" event={"ID":"a86c5953-cb61-4f11-b581-eb7698adf7ec","Type":"ContainerDied","Data":"ea825d917bfeba0203b60bbc0f3daf96dfe75625255c235f05c9e5ec2f9e945e"} Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.859424 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea825d917bfeba0203b60bbc0f3daf96dfe75625255c235f05c9e5ec2f9e945e" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.860143 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4265b" Sep 30 20:07:07 crc kubenswrapper[4603]: I0930 20:07:07.996046 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.065892 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.066138 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="dnsmasq-dns" containerID="cri-o://5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2" gracePeriod=10 Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.088607 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.101796 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:08 crc kubenswrapper[4603]: E0930 20:07:08.102210 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a86c5953-cb61-4f11-b581-eb7698adf7ec" containerName="neutron-db-sync" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.102222 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a86c5953-cb61-4f11-b581-eb7698adf7ec" containerName="neutron-db-sync" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.102412 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a86c5953-cb61-4f11-b581-eb7698adf7ec" containerName="neutron-db-sync" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.103402 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.131691 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.131925 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.132144 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-dzdhv" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.137679 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.152841 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.154273 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.155965 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.181139 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198638 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198748 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198782 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jrk9\" (UniqueName: \"kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198870 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198920 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.198981 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.199032 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.199143 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.199202 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.199309 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.199360 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42dql\" (UniqueName: \"kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302428 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302490 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302541 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302576 4603 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-42dql\" (UniqueName: \"kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302596 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302619 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302652 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jrk9\" (UniqueName: \"kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302696 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302734 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302752 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.302800 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.303760 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.304872 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.309019 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.311370 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.319700 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.327745 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.332188 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.338886 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42dql\" (UniqueName: \"kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql\") pod \"dnsmasq-dns-6bb4fc677f-4m74r\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.383362 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.384072 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.384621 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jrk9\" (UniqueName: \"kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9\") pod \"neutron-64bcfc7484-7pbkj\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " 
pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.441275 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.441320 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.466707 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.467212 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.831007 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dd56eaa-bb04-4612-acfc-ecf68345ac6c" path="/var/lib/kubelet/pods/8dd56eaa-bb04-4612-acfc-ecf68345ac6c/volumes" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.832407 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:07:08 crc kubenswrapper[4603]: E0930 20:07:08.838515 4603 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2ba9f96_59c1_438e_9b89_228c9dcd2409.slice/crio-conmon-5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956472 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w79mh\" (UniqueName: \"kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956519 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956541 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956624 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956649 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.956790 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb\") pod \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\" (UID: \"c2ba9f96-59c1-438e-9b89-228c9dcd2409\") " Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.969894 4603 generic.go:334] "Generic (PLEG): container finished" podID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerID="5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2" exitCode=0 Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.970200 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" event={"ID":"c2ba9f96-59c1-438e-9b89-228c9dcd2409","Type":"ContainerDied","Data":"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2"} Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.970228 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" event={"ID":"c2ba9f96-59c1-438e-9b89-228c9dcd2409","Type":"ContainerDied","Data":"e1de3abfa3306a90c72d5bd83ff03c756ca811c335e6cbfac7af2194dd11575a"} Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.970244 4603 scope.go:117] "RemoveContainer" containerID="5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.970370 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-674b76c99f-chg9c" Sep 30 20:07:08 crc kubenswrapper[4603]: I0930 20:07:08.989804 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f8849bc2-be9b-4897-9501-36c14d4e51f2","Type":"ContainerStarted","Data":"d2a650b159359db8f757e123fec92a9fd7933ef8c81e2363a0b9933e30934c19"} Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.001543 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh" (OuterVolumeSpecName: "kube-api-access-w79mh") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "kube-api-access-w79mh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.069120 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w79mh\" (UniqueName: \"kubernetes.io/projected/c2ba9f96-59c1-438e-9b89-228c9dcd2409-kube-api-access-w79mh\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.074796 4603 scope.go:117] "RemoveContainer" containerID="d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.134538 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.155982 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.180897 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.180934 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.203999 4603 scope.go:117] "RemoveContainer" containerID="5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2" Sep 30 20:07:09 crc kubenswrapper[4603]: E0930 20:07:09.208339 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2\": container with ID starting with 5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2 not found: ID does not exist" containerID="5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.208384 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2"} err="failed to get container status \"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2\": rpc error: code = NotFound desc = could not find container \"5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2\": container with ID starting with 5596f04d3467dc4dacaa12213beb41cba4dfcf0a21cde3c80f48606229da02b2 not found: ID does not exist" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.208407 4603 scope.go:117] "RemoveContainer" containerID="d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c" Sep 30 20:07:09 crc kubenswrapper[4603]: E0930 20:07:09.208918 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c\": container with ID starting with d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c not found: ID does not exist" containerID="d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.208954 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c"} err="failed to get container status \"d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c\": rpc error: code = NotFound desc = could not find container \"d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c\": container with ID starting with d43ff5b712c980ab1a601434e32fe7afc94ec6b3a9ff3ce53c89be0270aceb0c not found: ID does not exist" Sep 30 20:07:09 crc 
kubenswrapper[4603]: I0930 20:07:09.252620 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.263091 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.281860 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.301005 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config" (OuterVolumeSpecName: "config") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.305944 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c2ba9f96-59c1-438e-9b89-228c9dcd2409" (UID: "c2ba9f96-59c1-438e-9b89-228c9dcd2409"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.385295 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.385531 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2ba9f96-59c1-438e-9b89-228c9dcd2409-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.707883 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.720926 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-674b76c99f-chg9c"] Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.729007 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:09 crc kubenswrapper[4603]: I0930 20:07:09.962107 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 20:07:10 crc kubenswrapper[4603]: I0930 20:07:09.999933 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" event={"ID":"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb","Type":"ContainerStarted","Data":"446e68de17900efa1a43f499bc2bcca7692a857390b4ee00683905b10cd4cc9d"} Sep 30 20:07:10 crc kubenswrapper[4603]: I0930 20:07:10.001285 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerStarted","Data":"2abbd1202b0b4efcdaa78197042855c45d878511c9c66dea467dc1efcecae77c"} Sep 30 20:07:10 crc 
kubenswrapper[4603]: I0930 20:07:10.004626 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f8849bc2-be9b-4897-9501-36c14d4e51f2","Type":"ContainerStarted","Data":"95f9ed4653343efc4c5069c90cb136b50b6b45004535f9c9077bad72355a566e"} Sep 30 20:07:10 crc kubenswrapper[4603]: I0930 20:07:10.375714 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 20:07:10 crc kubenswrapper[4603]: I0930 20:07:10.436570 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:10 crc kubenswrapper[4603]: I0930 20:07:10.777336 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" path="/var/lib/kubelet/pods/c2ba9f96-59c1-438e-9b89-228c9dcd2409/volumes" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.016058 4603 generic.go:334] "Generic (PLEG): container finished" podID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerID="b56f0400d2ca5cc30d639d5f4f8c1d426e4195c798cf7198b87f9509cacb3ec8" exitCode=0 Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.016120 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" event={"ID":"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb","Type":"ContainerDied","Data":"b56f0400d2ca5cc30d639d5f4f8c1d426e4195c798cf7198b87f9509cacb3ec8"} Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.024029 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="cinder-scheduler" containerID="cri-o://3b9cabac9fd9abf4409391c5c61b0d72ada49954fc1145f58dfee2da6fd4a3da" gracePeriod=30 Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.024067 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerStarted","Data":"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2"} Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.024103 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerStarted","Data":"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f"} Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.024157 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="probe" containerID="cri-o://2bc261f2aa1f2441acd0b701ec42e97b41101633db011782251dc5da1abf7e13" gracePeriod=30 Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.024541 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.075796 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-64bcfc7484-7pbkj" podStartSLOduration=3.075771821 podStartE2EDuration="3.075771821s" podCreationTimestamp="2025-09-30 20:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:11.056196014 +0000 UTC m=+1232.994654842" watchObservedRunningTime="2025-09-30 20:07:11.075771821 +0000 UTC m=+1233.014230649" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.252467 4603 
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.252467 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.252826 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8575cd6744-wt57f"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.253775 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96"} pod="openstack/horizon-8575cd6744-wt57f" containerMessage="Container horizon failed startup probe, will be restarted"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.253810 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" containerID="cri-o://ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96" gracePeriod=30
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.277413 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.277569 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cc565dc7d-zt9pz"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.278434 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd"} pod="openstack/horizon-7cc565dc7d-zt9pz" containerMessage="Container horizon failed startup probe, will be restarted"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.278472 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" containerID="cri-o://f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd" gracePeriod=30
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.563809 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9cfbc4c69-xfx2z"]
Sep 30 20:07:11 crc kubenswrapper[4603]: E0930 20:07:11.564832 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="init"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.564872 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="init"
Sep 30 20:07:11 crc kubenswrapper[4603]: E0930 20:07:11.564912 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="dnsmasq-dns"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.564919 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="dnsmasq-dns"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.565120 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ba9f96-59c1-438e-9b89-228c9dcd2409" containerName="dnsmasq-dns"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.566136 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.570542 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.570787 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.594437 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9cfbc4c69-xfx2z"]
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668549 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-ovndb-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668626 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfzkh\" (UniqueName: \"kubernetes.io/projected/6d92de7a-d198-431c-a00c-bf93f63890c0-kube-api-access-cfzkh\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668660 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668799 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-combined-ca-bundle\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668870 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-httpd-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.668909 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-public-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z"
" pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.770862 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-combined-ca-bundle\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771207 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-httpd-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771238 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-public-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771258 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-internal-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771309 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-ovndb-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771330 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfzkh\" (UniqueName: \"kubernetes.io/projected/6d92de7a-d198-431c-a00c-bf93f63890c0-kube-api-access-cfzkh\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.771350 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.780047 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-ovndb-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.780063 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-httpd-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.780427 4603 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-internal-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.780896 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-combined-ca-bundle\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.789031 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-config\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.799663 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d92de7a-d198-431c-a00c-bf93f63890c0-public-tls-certs\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.801222 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfzkh\" (UniqueName: \"kubernetes.io/projected/6d92de7a-d198-431c-a00c-bf93f63890c0-kube-api-access-cfzkh\") pod \"neutron-9cfbc4c69-xfx2z\" (UID: \"6d92de7a-d198-431c-a00c-bf93f63890c0\") " pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:11 crc kubenswrapper[4603]: I0930 20:07:11.893745 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.038347 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f8849bc2-be9b-4897-9501-36c14d4e51f2","Type":"ContainerStarted","Data":"68c938fb98f2b1580664c9703a921775285371daeb64b55cb94a39c22e998301"} Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.039561 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.049511 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" event={"ID":"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb","Type":"ContainerStarted","Data":"44f46618a98a0c6690a31d3937af06c9b94f5ba8ed144bc4f8380f78d38767f0"} Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.049848 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.074380 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.074359194 podStartE2EDuration="6.074359194s" podCreationTimestamp="2025-09-30 20:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:12.067635159 +0000 UTC m=+1234.006093977" watchObservedRunningTime="2025-09-30 20:07:12.074359194 +0000 UTC m=+1234.012818012" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.302743 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.442389 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" podStartSLOduration=4.442359593 podStartE2EDuration="4.442359593s" podCreationTimestamp="2025-09-30 20:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:12.100279505 +0000 UTC m=+1234.038738333" watchObservedRunningTime="2025-09-30 20:07:12.442359593 +0000 UTC m=+1234.380818411" Sep 30 20:07:12 crc kubenswrapper[4603]: I0930 20:07:12.450811 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9cfbc4c69-xfx2z"] Sep 30 20:07:12 crc kubenswrapper[4603]: W0930 20:07:12.478425 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d92de7a_d198_431c_a00c_bf93f63890c0.slice/crio-cf2b8cc38d5ba3d9a06f920b8e6c236be55c3e360f2a11b6ce8ee34859506c26 WatchSource:0}: Error finding container cf2b8cc38d5ba3d9a06f920b8e6c236be55c3e360f2a11b6ce8ee34859506c26: Status 404 returned error can't find the container with id cf2b8cc38d5ba3d9a06f920b8e6c236be55c3e360f2a11b6ce8ee34859506c26 Sep 30 20:07:13 crc kubenswrapper[4603]: I0930 20:07:13.060917 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cfbc4c69-xfx2z" event={"ID":"6d92de7a-d198-431c-a00c-bf93f63890c0","Type":"ContainerStarted","Data":"499d8e6fab74660ed2ceae25fc175971cbf1d09be1ac9e7c088476bc6ef9e5b9"} Sep 30 
20:07:13 crc kubenswrapper[4603]: I0930 20:07:13.060976 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cfbc4c69-xfx2z" event={"ID":"6d92de7a-d198-431c-a00c-bf93f63890c0","Type":"ContainerStarted","Data":"cf2b8cc38d5ba3d9a06f920b8e6c236be55c3e360f2a11b6ce8ee34859506c26"} Sep 30 20:07:13 crc kubenswrapper[4603]: I0930 20:07:13.303439 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:13 crc kubenswrapper[4603]: I0930 20:07:13.345412 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:13 crc kubenswrapper[4603]: I0930 20:07:13.629379 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.089015 4603 generic.go:334] "Generic (PLEG): container finished" podID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerID="2bc261f2aa1f2441acd0b701ec42e97b41101633db011782251dc5da1abf7e13" exitCode=0 Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.089042 4603 generic.go:334] "Generic (PLEG): container finished" podID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerID="3b9cabac9fd9abf4409391c5c61b0d72ada49954fc1145f58dfee2da6fd4a3da" exitCode=0 Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.089083 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerDied","Data":"2bc261f2aa1f2441acd0b701ec42e97b41101633db011782251dc5da1abf7e13"} Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.089107 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerDied","Data":"3b9cabac9fd9abf4409391c5c61b0d72ada49954fc1145f58dfee2da6fd4a3da"} Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.099387 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cfbc4c69-xfx2z" event={"ID":"6d92de7a-d198-431c-a00c-bf93f63890c0","Type":"ContainerStarted","Data":"312f69ea996bc57d5d1e75f9012fc7f132dac29e784a91496a3ebda6eeb30ab0"} Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.099467 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.138442 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9cfbc4c69-xfx2z" podStartSLOduration=3.138421646 podStartE2EDuration="3.138421646s" podCreationTimestamp="2025-09-30 20:07:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:14.125562843 +0000 UTC 
m=+1236.064021661" watchObservedRunningTime="2025-09-30 20:07:14.138421646 +0000 UTC m=+1236.076880474" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.332422 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375062 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92nlz\" (UniqueName: \"kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375135 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375210 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375272 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375299 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375327 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts\") pod \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\" (UID: \"97ef8b3a-73e5-418e-9570-60367bdd7d8c\") " Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.375577 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.389373 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz" (OuterVolumeSpecName: "kube-api-access-92nlz") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "kube-api-access-92nlz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.403390 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.413460 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts" (OuterVolumeSpecName: "scripts") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.478276 4603 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97ef8b3a-73e5-418e-9570-60367bdd7d8c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.478397 4603 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.478681 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.478789 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92nlz\" (UniqueName: \"kubernetes.io/projected/97ef8b3a-73e5-418e-9570-60367bdd7d8c-kube-api-access-92nlz\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.497599 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.580476 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.583608 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data" (OuterVolumeSpecName: "config-data") pod "97ef8b3a-73e5-418e-9570-60367bdd7d8c" (UID: "97ef8b3a-73e5-418e-9570-60367bdd7d8c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:14 crc kubenswrapper[4603]: I0930 20:07:14.682076 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ef8b3a-73e5-418e-9570-60367bdd7d8c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.084358 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-c9d79bbb8-vkl5v" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.119652 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.120236 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"97ef8b3a-73e5-418e-9570-60367bdd7d8c","Type":"ContainerDied","Data":"279365ac2394cbb5c13482e6ee72a33811be36506a60e0fb5f40c55cdec82af7"} Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.120782 4603 scope.go:117] "RemoveContainer" containerID="2bc261f2aa1f2441acd0b701ec42e97b41101633db011782251dc5da1abf7e13" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.141584 4603 scope.go:117] "RemoveContainer" containerID="3b9cabac9fd9abf4409391c5c61b0d72ada49954fc1145f58dfee2da6fd4a3da" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.159561 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.178337 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.185373 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:15 crc kubenswrapper[4603]: E0930 20:07:15.185713 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="probe" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.185727 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="probe" Sep 30 20:07:15 crc kubenswrapper[4603]: E0930 20:07:15.185749 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="cinder-scheduler" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.185756 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="cinder-scheduler" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.185906 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="cinder-scheduler" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.185927 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" containerName="probe" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.186821 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.194795 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.201372 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.295379 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.295894 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.296017 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7jlj\" (UniqueName: \"kubernetes.io/projected/b40198c2-8d68-4aab-9744-67114df39cc8-kube-api-access-g7jlj\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.296113 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-scripts\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.296140 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b40198c2-8d68-4aab-9744-67114df39cc8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.296206 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398377 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7jlj\" (UniqueName: \"kubernetes.io/projected/b40198c2-8d68-4aab-9744-67114df39cc8-kube-api-access-g7jlj\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398734 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-scripts\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398760 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b40198c2-8d68-4aab-9744-67114df39cc8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398846 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.398907 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.399750 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b40198c2-8d68-4aab-9744-67114df39cc8-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.404435 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-scripts\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.405260 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.407142 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.407776 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b40198c2-8d68-4aab-9744-67114df39cc8-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.429735 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7jlj\" (UniqueName: \"kubernetes.io/projected/b40198c2-8d68-4aab-9744-67114df39cc8-kube-api-access-g7jlj\") pod \"cinder-scheduler-0\" (UID: \"b40198c2-8d68-4aab-9744-67114df39cc8\") " pod="openstack/cinder-scheduler-0" Sep 30 
20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.536265 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-c9d79bbb8-vkl5v" Sep 30 20:07:15 crc kubenswrapper[4603]: I0930 20:07:15.579941 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:07:16 crc kubenswrapper[4603]: I0930 20:07:16.252486 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:07:16 crc kubenswrapper[4603]: I0930 20:07:16.809452 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97ef8b3a-73e5-418e-9570-60367bdd7d8c" path="/var/lib/kubelet/pods/97ef8b3a-73e5-418e-9570-60367bdd7d8c/volumes" Sep 30 20:07:17 crc kubenswrapper[4603]: I0930 20:07:17.138996 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b40198c2-8d68-4aab-9744-67114df39cc8","Type":"ContainerStarted","Data":"d6ced05f8f19fe876f693e897cb31fc4b76700a9634ff0626686240d31f9a1da"} Sep 30 20:07:17 crc kubenswrapper[4603]: I0930 20:07:17.299319 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:17 crc kubenswrapper[4603]: I0930 20:07:17.823836 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:17 crc kubenswrapper[4603]: I0930 20:07:17.958814 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.152809 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b40198c2-8d68-4aab-9744-67114df39cc8","Type":"ContainerStarted","Data":"0329512535cec08ce8c3208c9b1a0ce641e81f109eac9b8e8efbc6e913e9a043"} Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.470365 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.548317 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.548779 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="dnsmasq-dns" containerID="cri-o://40d7b23316535d85069ccbfeaf2b76db34b5ca08d18274a44fdc88ebc4543a33" gracePeriod=10 Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.840335 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-8bd66565b-k2wg7" Sep 30 20:07:18 crc kubenswrapper[4603]: I0930 20:07:18.841013 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57b5d479d8-xlxfm" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.020454 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.167178 4603 generic.go:334] "Generic (PLEG): container finished" podID="dae67246-73b8-4810-9f01-2dde949d65ba" containerID="40d7b23316535d85069ccbfeaf2b76db34b5ca08d18274a44fdc88ebc4543a33" exitCode=0 Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.167475 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" event={"ID":"dae67246-73b8-4810-9f01-2dde949d65ba","Type":"ContainerDied","Data":"40d7b23316535d85069ccbfeaf2b76db34b5ca08d18274a44fdc88ebc4543a33"} Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.169072 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" containerID="cri-o://6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4" gracePeriod=30 Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.170233 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b40198c2-8d68-4aab-9744-67114df39cc8","Type":"ContainerStarted","Data":"4522e5b4bd7947f6ed34b3255014c5a0a0523f0c5d0e8d553a67a089b92e57f3"} Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.170498 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-647b495d86-qvfmh" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" containerID="cri-o://f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133" gracePeriod=30 Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.212356 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.212337933 podStartE2EDuration="4.212337933s" podCreationTimestamp="2025-09-30 20:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:19.208175758 +0000 UTC m=+1241.146634576" watchObservedRunningTime="2025-09-30 20:07:19.212337933 +0000 UTC m=+1241.150796751" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.303389 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421712 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421786 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421860 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421886 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7k5q\" (UniqueName: \"kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421922 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.421996 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb\") pod \"dae67246-73b8-4810-9f01-2dde949d65ba\" (UID: \"dae67246-73b8-4810-9f01-2dde949d65ba\") " Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.442477 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q" (OuterVolumeSpecName: "kube-api-access-f7k5q") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "kube-api-access-f7k5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.524557 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7k5q\" (UniqueName: \"kubernetes.io/projected/dae67246-73b8-4810-9f01-2dde949d65ba-kube-api-access-f7k5q\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.560205 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.583669 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.584226 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config" (OuterVolumeSpecName: "config") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.592304 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.595938 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dae67246-73b8-4810-9f01-2dde949d65ba" (UID: "dae67246-73b8-4810-9f01-2dde949d65ba"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.626281 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.626308 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.626318 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.626328 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:19 crc kubenswrapper[4603]: I0930 20:07:19.626335 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dae67246-73b8-4810-9f01-2dde949d65ba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.123220 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 20:07:20 crc kubenswrapper[4603]: E0930 20:07:20.123851 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="dnsmasq-dns" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.123917 4603 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="dnsmasq-dns" Sep 30 20:07:20 crc kubenswrapper[4603]: E0930 20:07:20.124042 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="init" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.124102 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="init" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.124385 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" containerName="dnsmasq-dns" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.125071 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.133312 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.138091 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.139661 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.140220 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-dfvtw" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.187053 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" event={"ID":"dae67246-73b8-4810-9f01-2dde949d65ba","Type":"ContainerDied","Data":"70cf6adbddf97d7cf6520db7f6fa758f916bbc893c6842bec5b23879b398e64f"} Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.187411 4603 scope.go:117] "RemoveContainer" containerID="40d7b23316535d85069ccbfeaf2b76db34b5ca08d18274a44fdc88ebc4543a33" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.187614 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-htxzg" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.204463 4603 generic.go:334] "Generic (PLEG): container finished" podID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerID="6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4" exitCode=143 Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.205442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerDied","Data":"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4"} Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.253902 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.253943 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9fv4\" (UniqueName: \"kubernetes.io/projected/0aaa6eda-a979-4944-b575-6b987d1e32f3-kube-api-access-g9fv4\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.254008 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.254059 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.265236 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.265782 4603 scope.go:117] "RemoveContainer" containerID="ed97a6d79923e1e447bd548ca14142dad9a6e73fadf29edc20d53d96a5ff50d3" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.290951 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-htxzg"] Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.355268 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.355325 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9fv4\" (UniqueName: \"kubernetes.io/projected/0aaa6eda-a979-4944-b575-6b987d1e32f3-kube-api-access-g9fv4\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.355426 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.355477 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.357682 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.384915 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.386964 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9fv4\" (UniqueName: \"kubernetes.io/projected/0aaa6eda-a979-4944-b575-6b987d1e32f3-kube-api-access-g9fv4\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.387497 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0aaa6eda-a979-4944-b575-6b987d1e32f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"0aaa6eda-a979-4944-b575-6b987d1e32f3\") " pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.446692 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.582549 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.797333 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dae67246-73b8-4810-9f01-2dde949d65ba" path="/var/lib/kubelet/pods/dae67246-73b8-4810-9f01-2dde949d65ba/volumes" Sep 30 20:07:20 crc kubenswrapper[4603]: I0930 20:07:20.860450 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:07:21 crc kubenswrapper[4603]: I0930 20:07:21.212921 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0aaa6eda-a979-4944-b575-6b987d1e32f3","Type":"ContainerStarted","Data":"0907b315ba8447fa5996bb573ce54f5b07f932eaec019459f6541dcc76ca29b3"} Sep 30 20:07:21 crc kubenswrapper[4603]: I0930 20:07:21.329366 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="f8849bc2-be9b-4897-9501-36c14d4e51f2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.228304 4603 generic.go:334] "Generic (PLEG): container finished" podID="53799743-167b-4a74-9cab-3e591a04391b" containerID="f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd" exitCode=0 Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.228605 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerDied","Data":"f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd"} Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.234606 4603 generic.go:334] "Generic (PLEG): container finished" podID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerID="ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96" exitCode=0 Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.234647 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerDied","Data":"ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96"} Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.297356 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-57b5d479d8-xlxfm" podUID="850ec93c-cfa1-4bb4-905b-1b8296985c50" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.328335 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="f8849bc2-be9b-4897-9501-36c14d4e51f2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:07:22 crc kubenswrapper[4603]: I0930 20:07:22.576261 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.102610 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.147754 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs\") pod \"145de1d7-ce63-4c6c-abff-358f4e40d765\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.147887 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data\") pod \"145de1d7-ce63-4c6c-abff-358f4e40d765\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.147914 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle\") pod \"145de1d7-ce63-4c6c-abff-358f4e40d765\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.147949 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2kph\" (UniqueName: \"kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph\") pod \"145de1d7-ce63-4c6c-abff-358f4e40d765\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.148075 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom\") pod \"145de1d7-ce63-4c6c-abff-358f4e40d765\" (UID: \"145de1d7-ce63-4c6c-abff-358f4e40d765\") " Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.148315 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs" (OuterVolumeSpecName: "logs") pod "145de1d7-ce63-4c6c-abff-358f4e40d765" (UID: "145de1d7-ce63-4c6c-abff-358f4e40d765"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.148618 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/145de1d7-ce63-4c6c-abff-358f4e40d765-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.167896 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph" (OuterVolumeSpecName: "kube-api-access-p2kph") pod "145de1d7-ce63-4c6c-abff-358f4e40d765" (UID: "145de1d7-ce63-4c6c-abff-358f4e40d765"). InnerVolumeSpecName "kube-api-access-p2kph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.180422 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "145de1d7-ce63-4c6c-abff-358f4e40d765" (UID: "145de1d7-ce63-4c6c-abff-358f4e40d765"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.206742 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data" (OuterVolumeSpecName: "config-data") pod "145de1d7-ce63-4c6c-abff-358f4e40d765" (UID: "145de1d7-ce63-4c6c-abff-358f4e40d765"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.214142 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "145de1d7-ce63-4c6c-abff-358f4e40d765" (UID: "145de1d7-ce63-4c6c-abff-358f4e40d765"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.250925 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.252060 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.252150 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2kph\" (UniqueName: \"kubernetes.io/projected/145de1d7-ce63-4c6c-abff-358f4e40d765-kube-api-access-p2kph\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.252237 4603 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/145de1d7-ce63-4c6c-abff-358f4e40d765-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.256317 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerStarted","Data":"6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f"} Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.270621 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerStarted","Data":"d571ab44a2d7f3b1f6275257c24c9ca29163fb980cc93a1fc37877a7a81e79af"} Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.293763 4603 generic.go:334] "Generic (PLEG): container finished" podID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerID="f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133" exitCode=0 Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.293802 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerDied","Data":"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133"} Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.293825 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-647b495d86-qvfmh" event={"ID":"145de1d7-ce63-4c6c-abff-358f4e40d765","Type":"ContainerDied","Data":"9ae9a2195a877ac781874aaaa20410bae6961a4d3cfab0c1a6effebda84eed1e"} Sep 30 20:07:23 crc 
kubenswrapper[4603]: I0930 20:07:23.294015 4603 scope.go:117] "RemoveContainer" containerID="f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.294137 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-647b495d86-qvfmh" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.333904 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.335094 4603 scope.go:117] "RemoveContainer" containerID="6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.341747 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-647b495d86-qvfmh"] Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.367320 4603 scope.go:117] "RemoveContainer" containerID="f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133" Sep 30 20:07:23 crc kubenswrapper[4603]: E0930 20:07:23.369632 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133\": container with ID starting with f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133 not found: ID does not exist" containerID="f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.369752 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133"} err="failed to get container status \"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133\": rpc error: code = NotFound desc = could not find container \"f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133\": container with ID starting with f17d849c279e7a76cae9f82dec0fd3198b5afc577b6ea5d04189c4fa72d46133 not found: ID does not exist" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.369856 4603 scope.go:117] "RemoveContainer" containerID="6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4" Sep 30 20:07:23 crc kubenswrapper[4603]: E0930 20:07:23.374415 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4\": container with ID starting with 6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4 not found: ID does not exist" containerID="6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4" Sep 30 20:07:23 crc kubenswrapper[4603]: I0930 20:07:23.374477 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4"} err="failed to get container status \"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4\": rpc error: code = NotFound desc = could not find container \"6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4\": container with ID starting with 6030a4915d1cb9ebb608c90a92da4289506430a3c8b60ebf5ca83da354331ad4 not found: ID does not exist" Sep 30 20:07:24 crc kubenswrapper[4603]: I0930 20:07:24.776332 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" 
path="/var/lib/kubelet/pods/145de1d7-ce63-4c6c-abff-358f4e40d765/volumes" Sep 30 20:07:25 crc kubenswrapper[4603]: I0930 20:07:25.246444 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 20:07:25 crc kubenswrapper[4603]: I0930 20:07:25.915111 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 20:07:26 crc kubenswrapper[4603]: I0930 20:07:26.245517 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:07:26 crc kubenswrapper[4603]: I0930 20:07:26.245595 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:07:26 crc kubenswrapper[4603]: I0930 20:07:26.270984 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:07:26 crc kubenswrapper[4603]: I0930 20:07:26.271034 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.006363 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5c4cbd6dd9-49g62"] Sep 30 20:07:27 crc kubenswrapper[4603]: E0930 20:07:27.006895 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.006906 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" Sep 30 20:07:27 crc kubenswrapper[4603]: E0930 20:07:27.006928 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.006934 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.007107 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api-log" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.007122 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="145de1d7-ce63-4c6c-abff-358f4e40d765" containerName="barbican-api" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.008632 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.011344 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.011509 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.011706 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.092095 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5c4cbd6dd9-49g62"] Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122282 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-config-data\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122354 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-internal-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122401 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-public-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122429 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-log-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122470 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvdg6\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-kube-api-access-wvdg6\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122486 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-combined-ca-bundle\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122525 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-etc-swift\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " 
pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.122548 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-run-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.223936 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-config-data\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224015 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-internal-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224056 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-public-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224087 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-log-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224128 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvdg6\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-kube-api-access-wvdg6\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224147 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-combined-ca-bundle\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224202 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-etc-swift\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224227 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-run-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 
20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.224760 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-run-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.226635 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-log-httpd\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.231863 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-config-data\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.235931 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-combined-ca-bundle\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.236993 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-etc-swift\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.237540 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-internal-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.244870 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-public-tls-certs\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.250064 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvdg6\" (UniqueName: \"kubernetes.io/projected/b5e87259-23eb-41cc-ba3a-ad1d47459e6a-kube-api-access-wvdg6\") pod \"swift-proxy-5c4cbd6dd9-49g62\" (UID: \"b5e87259-23eb-41cc-ba3a-ad1d47459e6a\") " pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.326191 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:27 crc kubenswrapper[4603]: I0930 20:07:27.948656 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5c4cbd6dd9-49g62"] Sep 30 20:07:28 crc kubenswrapper[4603]: I0930 20:07:28.354987 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" event={"ID":"b5e87259-23eb-41cc-ba3a-ad1d47459e6a","Type":"ContainerStarted","Data":"d79f737ace79c4f7d4ab51854d65403318d829245a6cbd264562de5f5501a1ce"} Sep 30 20:07:29 crc kubenswrapper[4603]: I0930 20:07:29.378357 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" event={"ID":"b5e87259-23eb-41cc-ba3a-ad1d47459e6a","Type":"ContainerStarted","Data":"45a18a83808854e6025e5d505e7f081decc222610b768bf7fc0cbf12583bf37b"} Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.026473 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.026754 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-central-agent" containerID="cri-o://e008aff623142fdc53980328ef3b7dda52eb27d356e64b551522a5a13749204f" gracePeriod=30 Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.026864 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="proxy-httpd" containerID="cri-o://89eae234742c6e2e93439ed477c47817376c7e75f9385aacf98cbd597b1e47ed" gracePeriod=30 Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.026905 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="sg-core" containerID="cri-o://996edcde2140c54d7f7c150132492fb51e98aea0c9d980747200119274a854fe" gracePeriod=30 Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.026938 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-notification-agent" containerID="cri-o://31d8433fa3dbb7b97bcb05e530912c8db1d862eba5039fb37aac3bc4304e8e5d" gracePeriod=30 Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.385347 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-9nddn"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.386855 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.400950 4603 generic.go:334] "Generic (PLEG): container finished" podID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerID="996edcde2140c54d7f7c150132492fb51e98aea0c9d980747200119274a854fe" exitCode=2 Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.400989 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerDied","Data":"996edcde2140c54d7f7c150132492fb51e98aea0c9d980747200119274a854fe"} Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.444796 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9nddn"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.504098 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-pdg9l"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.506317 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.510398 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpp75\" (UniqueName: \"kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75\") pod \"nova-api-db-create-9nddn\" (UID: \"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a\") " pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.572280 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pdg9l"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.623011 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb2d6\" (UniqueName: \"kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6\") pod \"nova-cell0-db-create-pdg9l\" (UID: \"2eea2891-5c31-482c-bb06-e829ee4348cd\") " pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.623093 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpp75\" (UniqueName: \"kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75\") pod \"nova-api-db-create-9nddn\" (UID: \"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a\") " pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.631270 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-hhjx8"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.632587 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.642978 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpp75\" (UniqueName: \"kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75\") pod \"nova-api-db-create-9nddn\" (UID: \"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a\") " pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.659143 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hhjx8"] Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.710907 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.725829 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb2d6\" (UniqueName: \"kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6\") pod \"nova-cell0-db-create-pdg9l\" (UID: \"2eea2891-5c31-482c-bb06-e829ee4348cd\") " pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.725952 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb9bv\" (UniqueName: \"kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv\") pod \"nova-cell1-db-create-hhjx8\" (UID: \"c01e2132-c5c6-4024-b8a8-38458b50eb14\") " pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.742541 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb2d6\" (UniqueName: \"kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6\") pod \"nova-cell0-db-create-pdg9l\" (UID: \"2eea2891-5c31-482c-bb06-e829ee4348cd\") " pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.828601 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb9bv\" (UniqueName: \"kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv\") pod \"nova-cell1-db-create-hhjx8\" (UID: \"c01e2132-c5c6-4024-b8a8-38458b50eb14\") " pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.856890 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb9bv\" (UniqueName: \"kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv\") pod \"nova-cell1-db-create-hhjx8\" (UID: \"c01e2132-c5c6-4024-b8a8-38458b50eb14\") " pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:30 crc kubenswrapper[4603]: I0930 20:07:30.863183 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.006100 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.417905 4603 generic.go:334] "Generic (PLEG): container finished" podID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerID="89eae234742c6e2e93439ed477c47817376c7e75f9385aacf98cbd597b1e47ed" exitCode=0 Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.417945 4603 generic.go:334] "Generic (PLEG): container finished" podID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerID="31d8433fa3dbb7b97bcb05e530912c8db1d862eba5039fb37aac3bc4304e8e5d" exitCode=0 Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.417956 4603 generic.go:334] "Generic (PLEG): container finished" podID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerID="e008aff623142fdc53980328ef3b7dda52eb27d356e64b551522a5a13749204f" exitCode=0 Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.417980 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerDied","Data":"89eae234742c6e2e93439ed477c47817376c7e75f9385aacf98cbd597b1e47ed"} Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.418010 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerDied","Data":"31d8433fa3dbb7b97bcb05e530912c8db1d862eba5039fb37aac3bc4304e8e5d"} Sep 30 20:07:31 crc kubenswrapper[4603]: I0930 20:07:31.418025 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerDied","Data":"e008aff623142fdc53980328ef3b7dda52eb27d356e64b551522a5a13749204f"} Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.247760 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.273552 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.877090 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958516 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958572 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958681 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958737 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958756 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958894 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.958959 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w64fs\" (UniqueName: \"kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs\") pod \"e5cb044d-61cd-425b-9fba-42f9427172d1\" (UID: \"e5cb044d-61cd-425b-9fba-42f9427172d1\") " Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.961470 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.961570 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.972978 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs" (OuterVolumeSpecName: "kube-api-access-w64fs") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "kube-api-access-w64fs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:36 crc kubenswrapper[4603]: I0930 20:07:36.996585 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts" (OuterVolumeSpecName: "scripts") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.014547 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.060569 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.060604 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w64fs\" (UniqueName: \"kubernetes.io/projected/e5cb044d-61cd-425b-9fba-42f9427172d1-kube-api-access-w64fs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.060617 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.060625 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.060634 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5cb044d-61cd-425b-9fba-42f9427172d1-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.063777 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.126348 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data" (OuterVolumeSpecName: "config-data") pod "e5cb044d-61cd-425b-9fba-42f9427172d1" (UID: "e5cb044d-61cd-425b-9fba-42f9427172d1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.162727 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.162769 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5cb044d-61cd-425b-9fba-42f9427172d1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:37 crc kubenswrapper[4603]: W0930 20:07:37.187406 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eea2891_5c31_482c_bb06_e829ee4348cd.slice/crio-b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8 WatchSource:0}: Error finding container b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8: Status 404 returned error can't find the container with id b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8 Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.194299 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pdg9l"] Sep 30 20:07:37 crc kubenswrapper[4603]: W0930 20:07:37.242470 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc01e2132_c5c6_4024_b8a8_38458b50eb14.slice/crio-e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d WatchSource:0}: Error finding container e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d: Status 404 returned error can't find the container with id e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.255460 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hhjx8"] Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.265677 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9nddn"] Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.511900 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pdg9l" event={"ID":"2eea2891-5c31-482c-bb06-e829ee4348cd","Type":"ContainerStarted","Data":"62522f9a75213475d390fbff44d7121abdbce5d6413f81455635c28fa06501e0"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.511942 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pdg9l" event={"ID":"2eea2891-5c31-482c-bb06-e829ee4348cd","Type":"ContainerStarted","Data":"b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.514927 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5cb044d-61cd-425b-9fba-42f9427172d1","Type":"ContainerDied","Data":"cc78d12894f649cd0e730f8b9fb5ea8e5d5566bc29d4c5b7f2a9bb9764abc77c"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.514983 4603 scope.go:117] "RemoveContainer" containerID="89eae234742c6e2e93439ed477c47817376c7e75f9385aacf98cbd597b1e47ed" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.514987 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.518481 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9nddn" event={"ID":"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a","Type":"ContainerStarted","Data":"cd8b1600ab56800f68b8589a378a3aaf9ac503c0fde0f279b7a4ca8c3b1ea168"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.518519 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9nddn" event={"ID":"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a","Type":"ContainerStarted","Data":"9bee416f3194f6bfcaac512946c312a8215094395dfe262b3a5316b89e350644"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.519799 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hhjx8" event={"ID":"c01e2132-c5c6-4024-b8a8-38458b50eb14","Type":"ContainerStarted","Data":"6af1b3a8e48f883f887e66087826e735d02918d6ba47bc2dc401412932551a41"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.519837 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hhjx8" event={"ID":"c01e2132-c5c6-4024-b8a8-38458b50eb14","Type":"ContainerStarted","Data":"e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.523490 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" event={"ID":"b5e87259-23eb-41cc-ba3a-ad1d47459e6a","Type":"ContainerStarted","Data":"f8df1ed93b6a86ef8c41c0fc1ff19e023fd778ddd0a3ad24369b7aa6a8f919cd"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.523801 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.523824 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.532709 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0aaa6eda-a979-4944-b575-6b987d1e32f3","Type":"ContainerStarted","Data":"25c879baef8e2d2d8bab71999c47486e713ad51980bad003449afd62060caa8e"} Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.536748 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-pdg9l" podStartSLOduration=7.536729767 podStartE2EDuration="7.536729767s" podCreationTimestamp="2025-09-30 20:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:37.528117071 +0000 UTC m=+1259.466575889" watchObservedRunningTime="2025-09-30 20:07:37.536729767 +0000 UTC m=+1259.475188585" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.538412 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" podUID="b5e87259-23eb-41cc-ba3a-ad1d47459e6a" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.549090 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-9nddn" podStartSLOduration=7.549073866 podStartE2EDuration="7.549073866s" podCreationTimestamp="2025-09-30 20:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 20:07:37.542250249 +0000 UTC m=+1259.480709067" watchObservedRunningTime="2025-09-30 20:07:37.549073866 +0000 UTC m=+1259.487532684" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.583254 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-hhjx8" podStartSLOduration=7.583219034 podStartE2EDuration="7.583219034s" podCreationTimestamp="2025-09-30 20:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:37.57948661 +0000 UTC m=+1259.517945428" watchObservedRunningTime="2025-09-30 20:07:37.583219034 +0000 UTC m=+1259.521677852" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.615289 4603 scope.go:117] "RemoveContainer" containerID="996edcde2140c54d7f7c150132492fb51e98aea0c9d980747200119274a854fe" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.625668 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" podStartSLOduration=11.6169715 podStartE2EDuration="11.6169715s" podCreationTimestamp="2025-09-30 20:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:37.60822703 +0000 UTC m=+1259.546685848" watchObservedRunningTime="2025-09-30 20:07:37.6169715 +0000 UTC m=+1259.555430318" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.639458 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.648689 4603 scope.go:117] "RemoveContainer" containerID="31d8433fa3dbb7b97bcb05e530912c8db1d862eba5039fb37aac3bc4304e8e5d" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.654996 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.660618 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.9303219390000002 podStartE2EDuration="17.660591966s" podCreationTimestamp="2025-09-30 20:07:20 +0000 UTC" firstStartedPulling="2025-09-30 20:07:20.809098491 +0000 UTC m=+1242.747557309" lastFinishedPulling="2025-09-30 20:07:36.539368518 +0000 UTC m=+1258.477827336" observedRunningTime="2025-09-30 20:07:37.648628378 +0000 UTC m=+1259.587087196" watchObservedRunningTime="2025-09-30 20:07:37.660591966 +0000 UTC m=+1259.599050784" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.689221 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:37 crc kubenswrapper[4603]: E0930 20:07:37.693476 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="proxy-httpd" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.693513 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="proxy-httpd" Sep 30 20:07:37 crc kubenswrapper[4603]: E0930 20:07:37.693539 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-central-agent" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.693549 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-central-agent" Sep 30 20:07:37 crc 
kubenswrapper[4603]: E0930 20:07:37.693597 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="sg-core" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.693604 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="sg-core" Sep 30 20:07:37 crc kubenswrapper[4603]: E0930 20:07:37.693639 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-notification-agent" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.693646 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-notification-agent" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.695181 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="proxy-httpd" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.695207 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="sg-core" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.695231 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-notification-agent" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.695256 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" containerName="ceilometer-central-agent" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.709518 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.720969 4603 scope.go:117] "RemoveContainer" containerID="e008aff623142fdc53980328ef3b7dda52eb27d356e64b551522a5a13749204f" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.725385 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.725412 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.737583 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783384 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783454 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783503 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc 
kubenswrapper[4603]: I0930 20:07:37.783552 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrq2q\" (UniqueName: \"kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783582 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783630 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.783692 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.884873 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.884940 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.884977 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.885004 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.885065 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrq2q\" (UniqueName: \"kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.885079 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.885116 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.885502 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.888035 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.894993 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.895711 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.904899 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.912516 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:37 crc kubenswrapper[4603]: I0930 20:07:37.921729 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrq2q\" (UniqueName: \"kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q\") pod \"ceilometer-0\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " pod="openstack/ceilometer-0" Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.059173 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.442601 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.442972 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.483069 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.592124 4603 generic.go:334] "Generic (PLEG): container finished" podID="2eea2891-5c31-482c-bb06-e829ee4348cd" containerID="62522f9a75213475d390fbff44d7121abdbce5d6413f81455635c28fa06501e0" exitCode=0 Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.592217 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pdg9l" event={"ID":"2eea2891-5c31-482c-bb06-e829ee4348cd","Type":"ContainerDied","Data":"62522f9a75213475d390fbff44d7121abdbce5d6413f81455635c28fa06501e0"} Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.606553 4603 generic.go:334] "Generic (PLEG): container finished" podID="4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" containerID="cd8b1600ab56800f68b8589a378a3aaf9ac503c0fde0f279b7a4ca8c3b1ea168" exitCode=0 Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.606651 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9nddn" event={"ID":"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a","Type":"ContainerDied","Data":"cd8b1600ab56800f68b8589a378a3aaf9ac503c0fde0f279b7a4ca8c3b1ea168"} Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.625015 4603 generic.go:334] "Generic (PLEG): container finished" podID="c01e2132-c5c6-4024-b8a8-38458b50eb14" containerID="6af1b3a8e48f883f887e66087826e735d02918d6ba47bc2dc401412932551a41" exitCode=0 Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.625154 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hhjx8" event={"ID":"c01e2132-c5c6-4024-b8a8-38458b50eb14","Type":"ContainerDied","Data":"6af1b3a8e48f883f887e66087826e735d02918d6ba47bc2dc401412932551a41"} Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.635457 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" podUID="b5e87259-23eb-41cc-ba3a-ad1d47459e6a" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.708313 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:38 crc kubenswrapper[4603]: I0930 20:07:38.790460 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5cb044d-61cd-425b-9fba-42f9427172d1" path="/var/lib/kubelet/pods/e5cb044d-61cd-425b-9fba-42f9427172d1/volumes" Sep 30 20:07:39 crc kubenswrapper[4603]: I0930 20:07:39.345605 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" 
podUID="b5e87259-23eb-41cc-ba3a-ad1d47459e6a" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 20:07:39 crc kubenswrapper[4603]: I0930 20:07:39.646513 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerStarted","Data":"521f2ed501175b7ce945a55eb9c154023a1044f7bd93e07d884c0d7b325cfba5"} Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.557145 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.655609 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpp75\" (UniqueName: \"kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75\") pod \"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a\" (UID: \"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a\") " Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.667385 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75" (OuterVolumeSpecName: "kube-api-access-bpp75") pod "4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" (UID: "4f4f28e5-ceda-4147-9bb8-2f7c6142b78a"). InnerVolumeSpecName "kube-api-access-bpp75". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.688315 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerStarted","Data":"2c74c0a2f7f012e96523fcdb2b29eb699cdec45deaeed3d57f817ca6a7a1dbdb"} Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.719367 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9nddn" event={"ID":"4f4f28e5-ceda-4147-9bb8-2f7c6142b78a","Type":"ContainerDied","Data":"9bee416f3194f6bfcaac512946c312a8215094395dfe262b3a5316b89e350644"} Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.719408 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bee416f3194f6bfcaac512946c312a8215094395dfe262b3a5316b89e350644" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.719470 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9nddn" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.766723 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpp75\" (UniqueName: \"kubernetes.io/projected/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a-kube-api-access-bpp75\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.881465 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:40 crc kubenswrapper[4603]: I0930 20:07:40.889108 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.071690 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb2d6\" (UniqueName: \"kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6\") pod \"2eea2891-5c31-482c-bb06-e829ee4348cd\" (UID: \"2eea2891-5c31-482c-bb06-e829ee4348cd\") " Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.072147 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb9bv\" (UniqueName: \"kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv\") pod \"c01e2132-c5c6-4024-b8a8-38458b50eb14\" (UID: \"c01e2132-c5c6-4024-b8a8-38458b50eb14\") " Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.075889 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6" (OuterVolumeSpecName: "kube-api-access-rb2d6") pod "2eea2891-5c31-482c-bb06-e829ee4348cd" (UID: "2eea2891-5c31-482c-bb06-e829ee4348cd"). InnerVolumeSpecName "kube-api-access-rb2d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.076432 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv" (OuterVolumeSpecName: "kube-api-access-xb9bv") pod "c01e2132-c5c6-4024-b8a8-38458b50eb14" (UID: "c01e2132-c5c6-4024-b8a8-38458b50eb14"). InnerVolumeSpecName "kube-api-access-xb9bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.177201 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb9bv\" (UniqueName: \"kubernetes.io/projected/c01e2132-c5c6-4024-b8a8-38458b50eb14-kube-api-access-xb9bv\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.177453 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb2d6\" (UniqueName: \"kubernetes.io/projected/2eea2891-5c31-482c-bb06-e829ee4348cd-kube-api-access-rb2d6\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.728634 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pdg9l" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.731035 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pdg9l" event={"ID":"2eea2891-5c31-482c-bb06-e829ee4348cd","Type":"ContainerDied","Data":"b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8"} Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.731150 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b132e6ecc43bd951055961cae5c0a9dff801e8e40954c6da3aabc60e2bc931c8" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.733102 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hhjx8" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.733627 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hhjx8" event={"ID":"c01e2132-c5c6-4024-b8a8-38458b50eb14","Type":"ContainerDied","Data":"e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d"} Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.733669 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e96baf048e717a5cfc9a513cc70ddc2c393786478585e1e31f0bdb91df2d978d" Sep 30 20:07:41 crc kubenswrapper[4603]: I0930 20:07:41.945466 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9cfbc4c69-xfx2z" Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.007727 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.011372 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64bcfc7484-7pbkj" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-httpd" containerID="cri-o://4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2" gracePeriod=30 Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.013206 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64bcfc7484-7pbkj" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-api" containerID="cri-o://4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f" gracePeriod=30 Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.340477 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.391728 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5c4cbd6dd9-49g62" Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.789975 4603 generic.go:334] "Generic (PLEG): container finished" podID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerID="4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2" exitCode=0 Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.820481 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerDied","Data":"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2"} Sep 30 20:07:42 crc kubenswrapper[4603]: I0930 20:07:42.820521 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerStarted","Data":"741c3b9324d1d9267f398d79739b4c8026bb46f59831ceb3a71ae2132f66b90c"} Sep 30 20:07:43 crc kubenswrapper[4603]: I0930 20:07:43.819091 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerStarted","Data":"387513b8d289ac6584bf5bdaa35fea43da44b16d10959e67a7532508bfb04bbe"} Sep 30 20:07:44 crc kubenswrapper[4603]: I0930 20:07:44.828470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerStarted","Data":"0fc681d22b34c54893908d99a6f215f1ec3288c828db8eb4050c4ea6421e87d4"} Sep 30 20:07:44 crc kubenswrapper[4603]: I0930 20:07:44.828851 4603 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:07:44 crc kubenswrapper[4603]: I0930 20:07:44.852418 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.070753165 podStartE2EDuration="7.852404103s" podCreationTimestamp="2025-09-30 20:07:37 +0000 UTC" firstStartedPulling="2025-09-30 20:07:38.714707774 +0000 UTC m=+1260.653166592" lastFinishedPulling="2025-09-30 20:07:44.496358712 +0000 UTC m=+1266.434817530" observedRunningTime="2025-09-30 20:07:44.847668893 +0000 UTC m=+1266.786127711" watchObservedRunningTime="2025-09-30 20:07:44.852404103 +0000 UTC m=+1266.790862921" Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.248928 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.272679 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.722943 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.844735 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-central-agent" containerID="cri-o://2c74c0a2f7f012e96523fcdb2b29eb699cdec45deaeed3d57f817ca6a7a1dbdb" gracePeriod=30 Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.844777 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="proxy-httpd" containerID="cri-o://0fc681d22b34c54893908d99a6f215f1ec3288c828db8eb4050c4ea6421e87d4" gracePeriod=30 Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.844861 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="sg-core" containerID="cri-o://387513b8d289ac6584bf5bdaa35fea43da44b16d10959e67a7532508bfb04bbe" gracePeriod=30 Sep 30 20:07:46 crc kubenswrapper[4603]: I0930 20:07:46.844908 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-notification-agent" containerID="cri-o://741c3b9324d1d9267f398d79739b4c8026bb46f59831ceb3a71ae2132f66b90c" gracePeriod=30 Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.864685 4603 generic.go:334] "Generic (PLEG): container finished" podID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerID="0fc681d22b34c54893908d99a6f215f1ec3288c828db8eb4050c4ea6421e87d4" exitCode=0 Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.865014 4603 generic.go:334] "Generic (PLEG): container finished" podID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerID="387513b8d289ac6584bf5bdaa35fea43da44b16d10959e67a7532508bfb04bbe" exitCode=2 Sep 30 20:07:47 crc 
kubenswrapper[4603]: I0930 20:07:47.865023 4603 generic.go:334] "Generic (PLEG): container finished" podID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerID="741c3b9324d1d9267f398d79739b4c8026bb46f59831ceb3a71ae2132f66b90c" exitCode=0 Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.865031 4603 generic.go:334] "Generic (PLEG): container finished" podID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerID="2c74c0a2f7f012e96523fcdb2b29eb699cdec45deaeed3d57f817ca6a7a1dbdb" exitCode=0 Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.864769 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerDied","Data":"0fc681d22b34c54893908d99a6f215f1ec3288c828db8eb4050c4ea6421e87d4"} Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.865078 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerDied","Data":"387513b8d289ac6584bf5bdaa35fea43da44b16d10959e67a7532508bfb04bbe"} Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.865090 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerDied","Data":"741c3b9324d1d9267f398d79739b4c8026bb46f59831ceb3a71ae2132f66b90c"} Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.865098 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerDied","Data":"2c74c0a2f7f012e96523fcdb2b29eb699cdec45deaeed3d57f817ca6a7a1dbdb"} Sep 30 20:07:47 crc kubenswrapper[4603]: I0930 20:07:47.952630 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104070 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104570 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104601 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104635 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104679 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrq2q\" (UniqueName: \"kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104734 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104736 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.104773 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts\") pod \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\" (UID: \"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2\") " Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.105231 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.105243 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.115403 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts" (OuterVolumeSpecName: "scripts") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.115573 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q" (OuterVolumeSpecName: "kube-api-access-jrq2q") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). InnerVolumeSpecName "kube-api-access-jrq2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.144871 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.204247 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.208322 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.208359 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.208371 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.208382 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.208391 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrq2q\" (UniqueName: \"kubernetes.io/projected/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-kube-api-access-jrq2q\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.232353 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data" (OuterVolumeSpecName: "config-data") pod "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" (UID: "1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.309946 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.441144 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.442303 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-httpd" containerID="cri-o://39f4e803dadc6f72a90b2534271c9f093ed0bfc46c7771ee64238291b8213dbd" gracePeriod=30 Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.442258 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-log" containerID="cri-o://1a078c00d2d665a1e293f8362337b04a4c0a16a8531bfc386d4869264a9f9c0e" gracePeriod=30 Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.876404 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2","Type":"ContainerDied","Data":"521f2ed501175b7ce945a55eb9c154023a1044f7bd93e07d884c0d7b325cfba5"} Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.876474 4603 scope.go:117] "RemoveContainer" containerID="0fc681d22b34c54893908d99a6f215f1ec3288c828db8eb4050c4ea6421e87d4" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.876487 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.880304 4603 generic.go:334] "Generic (PLEG): container finished" podID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerID="1a078c00d2d665a1e293f8362337b04a4c0a16a8531bfc386d4869264a9f9c0e" exitCode=143 Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.880352 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerDied","Data":"1a078c00d2d665a1e293f8362337b04a4c0a16a8531bfc386d4869264a9f9c0e"} Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.904482 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.908237 4603 scope.go:117] "RemoveContainer" containerID="387513b8d289ac6584bf5bdaa35fea43da44b16d10959e67a7532508bfb04bbe" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.925595 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.942133 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944315 4603 scope.go:117] "RemoveContainer" containerID="741c3b9324d1d9267f398d79739b4c8026bb46f59831ceb3a71ae2132f66b90c" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944761 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c01e2132-c5c6-4024-b8a8-38458b50eb14" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944789 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c01e2132-c5c6-4024-b8a8-38458b50eb14" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944814 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-central-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944821 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-central-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944830 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="sg-core" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944838 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="sg-core" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944856 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-notification-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944862 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-notification-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944889 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944896 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944908 4603 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="proxy-httpd" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944914 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="proxy-httpd" Sep 30 20:07:48 crc kubenswrapper[4603]: E0930 20:07:48.944932 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eea2891-5c31-482c-bb06-e829ee4348cd" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.944938 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eea2891-5c31-482c-bb06-e829ee4348cd" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945190 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c01e2132-c5c6-4024-b8a8-38458b50eb14" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945199 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="sg-core" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945220 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-central-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945228 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="proxy-httpd" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945239 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" containerName="ceilometer-notification-agent" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945246 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.945256 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eea2891-5c31-482c-bb06-e829ee4348cd" containerName="mariadb-database-create" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.946881 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.951654 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.951748 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:07:48 crc kubenswrapper[4603]: I0930 20:07:48.963822 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.004369 4603 scope.go:117] "RemoveContainer" containerID="2c74c0a2f7f012e96523fcdb2b29eb699cdec45deaeed3d57f817ca6a7a1dbdb" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.022977 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023019 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023058 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023099 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023133 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023171 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjqr9\" (UniqueName: \"kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.023193 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.124631 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125082 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125033 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125754 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125814 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125864 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125893 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjqr9\" (UniqueName: \"kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.125919 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.126123 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.133201 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.133693 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.136076 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.151692 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjqr9\" (UniqueName: \"kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.163636 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts\") pod \"ceilometer-0\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.267749 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.795709 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:49 crc kubenswrapper[4603]: I0930 20:07:49.888573 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerStarted","Data":"676f1dd1b37ea3024a4dff5a70d6de0a90531f83fb9fd26a44ccc3b03f3af502"} Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.721729 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5122-account-create-ps78f"] Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.723647 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.733843 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.737858 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5122-account-create-ps78f"] Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.777299 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8fpx\" (UniqueName: \"kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx\") pod \"nova-api-5122-account-create-ps78f\" (UID: \"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2\") " pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.786902 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2" path="/var/lib/kubelet/pods/1ea6c833-4d3c-4b50-a7ed-e6e9fedfb8a2/volumes" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.872229 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cba5-account-create-mr2xq"] Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.874295 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.879186 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8fpx\" (UniqueName: \"kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx\") pod \"nova-api-5122-account-create-ps78f\" (UID: \"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2\") " pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.879682 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.889246 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cba5-account-create-mr2xq"] Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.931336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8fpx\" (UniqueName: \"kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx\") pod \"nova-api-5122-account-create-ps78f\" (UID: \"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2\") " pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.933266 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerStarted","Data":"50e4b2b7daf50ed713f7078fef6b873bc88fbd0d447c99b88cebd9f8c9922299"} Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.982221 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdf5v\" (UniqueName: \"kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v\") pod \"nova-cell0-cba5-account-create-mr2xq\" (UID: \"fe01d8e6-6f43-47f3-a31f-e8ecbc536785\") " pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.988210 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3142-account-create-2kf4s"] Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.989382 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:50 crc kubenswrapper[4603]: I0930 20:07:50.991853 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.025583 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3142-account-create-2kf4s"] Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.055748 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.083809 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pnv5\" (UniqueName: \"kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5\") pod \"nova-cell1-3142-account-create-2kf4s\" (UID: \"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63\") " pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.083945 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdf5v\" (UniqueName: \"kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v\") pod \"nova-cell0-cba5-account-create-mr2xq\" (UID: \"fe01d8e6-6f43-47f3-a31f-e8ecbc536785\") " pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.102759 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdf5v\" (UniqueName: \"kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v\") pod \"nova-cell0-cba5-account-create-mr2xq\" (UID: \"fe01d8e6-6f43-47f3-a31f-e8ecbc536785\") " pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.185404 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pnv5\" (UniqueName: \"kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5\") pod \"nova-cell1-3142-account-create-2kf4s\" (UID: \"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63\") " pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.194357 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.209224 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pnv5\" (UniqueName: \"kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5\") pod \"nova-cell1-3142-account-create-2kf4s\" (UID: \"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63\") " pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.334162 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.352578 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5122-account-create-ps78f"] Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.926954 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3142-account-create-2kf4s"] Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.979712 4603 generic.go:334] "Generic (PLEG): container finished" podID="02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" containerID="be872dbf9dcea86dd7dc4d41756de80754b59d61704f406c73460bb937bde769" exitCode=0 Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.979781 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5122-account-create-ps78f" event={"ID":"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2","Type":"ContainerDied","Data":"be872dbf9dcea86dd7dc4d41756de80754b59d61704f406c73460bb937bde769"} Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.979806 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5122-account-create-ps78f" event={"ID":"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2","Type":"ContainerStarted","Data":"8de58e0daf3c46b950d515a149331ec634bbb091c7b035f6c8378a6f55c672e6"} Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.981524 4603 generic.go:334] "Generic (PLEG): container finished" podID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerID="39f4e803dadc6f72a90b2534271c9f093ed0bfc46c7771ee64238291b8213dbd" exitCode=0 Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.981565 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerDied","Data":"39f4e803dadc6f72a90b2534271c9f093ed0bfc46c7771ee64238291b8213dbd"} Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.985065 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3142-account-create-2kf4s" event={"ID":"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63","Type":"ContainerStarted","Data":"e4940a9d2d28dec125492107de39f7260c8de0f359b89b4eb0c6e00cad0f1205"} Sep 30 20:07:51 crc kubenswrapper[4603]: I0930 20:07:51.998490 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerStarted","Data":"6ec57fd41e181ab7402dbc2defdfd3ac64b13acaadca1ce9353ce89c0f9e8dbe"} Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.033576 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cba5-account-create-mr2xq"] Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.196564 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213447 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213529 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213561 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213620 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213670 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tknzx\" (UniqueName: \"kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213686 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213749 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.213824 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts\") pod \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\" (UID: \"d09701f3-eb4c-4104-a482-d85b4f3f89a7\") " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.214214 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs" (OuterVolumeSpecName: "logs") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.219458 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.220656 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.225386 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx" (OuterVolumeSpecName: "kube-api-access-tknzx") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "kube-api-access-tknzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.233450 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts" (OuterVolumeSpecName: "scripts") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.318498 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.318552 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.318566 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.318580 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tknzx\" (UniqueName: \"kubernetes.io/projected/d09701f3-eb4c-4104-a482-d85b4f3f89a7-kube-api-access-tknzx\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.318594 4603 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d09701f3-eb4c-4104-a482-d85b4f3f89a7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.513912 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.546083 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data" (OuterVolumeSpecName: "config-data") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.564781 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.564837 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.574691 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.575303 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d09701f3-eb4c-4104-a482-d85b4f3f89a7" (UID: "d09701f3-eb4c-4104-a482-d85b4f3f89a7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.666535 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:52 crc kubenswrapper[4603]: I0930 20:07:52.666572 4603 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09701f3-eb4c-4104-a482-d85b4f3f89a7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.053523 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.061113 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d09701f3-eb4c-4104-a482-d85b4f3f89a7","Type":"ContainerDied","Data":"a3cd06c2e01ad71710ca659783ec6ba5f480dcc26d3e78b3187d33d6f9c3b9a0"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.061185 4603 scope.go:117] "RemoveContainer" containerID="39f4e803dadc6f72a90b2534271c9f093ed0bfc46c7771ee64238291b8213dbd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.061363 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.075464 4603 generic.go:334] "Generic (PLEG): container finished" podID="fe01d8e6-6f43-47f3-a31f-e8ecbc536785" containerID="ff8ed9fb071fc43e165463822d66a4b7aed5d5ab103f1e48d9f7ce6a529ccd28" exitCode=0 Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.075545 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cba5-account-create-mr2xq" event={"ID":"fe01d8e6-6f43-47f3-a31f-e8ecbc536785","Type":"ContainerDied","Data":"ff8ed9fb071fc43e165463822d66a4b7aed5d5ab103f1e48d9f7ce6a529ccd28"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.075570 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cba5-account-create-mr2xq" event={"ID":"fe01d8e6-6f43-47f3-a31f-e8ecbc536785","Type":"ContainerStarted","Data":"cbdacf6f23f35ed37c0b2ac0a16b0128c6f4584d3f9c71fa16c98ad6fd8b3dab"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.124055 4603 generic.go:334] "Generic (PLEG): container finished" podID="d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" containerID="50ec0e1d60ef266801438be0090b207031dac3cea909ed39f946d928fecdf0f3" exitCode=0 Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.126477 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3142-account-create-2kf4s" event={"ID":"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63","Type":"ContainerDied","Data":"50ec0e1d60ef266801438be0090b207031dac3cea909ed39f946d928fecdf0f3"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.134920 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.145207 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.145756 4603 generic.go:334] "Generic (PLEG): container finished" podID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerID="4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f" exitCode=0 Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.145885 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64bcfc7484-7pbkj" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.146474 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerDied","Data":"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.146503 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64bcfc7484-7pbkj" event={"ID":"de8cce1f-54bd-43ab-ba6d-13d804e9955f","Type":"ContainerDied","Data":"2abbd1202b0b4efcdaa78197042855c45d878511c9c66dea467dc1efcecae77c"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.172634 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerStarted","Data":"e0295986fbc6eb370202e52c7c60c7997f6c035f59c0629c77dca09e3986c90c"} Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.176487 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jrk9\" (UniqueName: \"kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9\") pod \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.176757 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs\") pod \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.177313 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config\") pod \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.177440 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config\") pod \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.177585 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle\") pod \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\" (UID: \"de8cce1f-54bd-43ab-ba6d-13d804e9955f\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.179833 4603 scope.go:117] "RemoveContainer" containerID="1a078c00d2d665a1e293f8362337b04a4c0a16a8531bfc386d4869264a9f9c0e" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.184639 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9" (OuterVolumeSpecName: "kube-api-access-8jrk9") pod "de8cce1f-54bd-43ab-ba6d-13d804e9955f" (UID: "de8cce1f-54bd-43ab-ba6d-13d804e9955f"). InnerVolumeSpecName "kube-api-access-8jrk9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.185476 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "de8cce1f-54bd-43ab-ba6d-13d804e9955f" (UID: "de8cce1f-54bd-43ab-ba6d-13d804e9955f"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226252 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.226681 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-api" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226693 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-api" Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.226711 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226717 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.226747 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-log" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226755 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-log" Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.226763 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226768 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226926 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-api" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226941 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226960 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" containerName="glance-log" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.226973 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" containerName="neutron-httpd" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.228256 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.230912 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.238479 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.266673 4603 scope.go:117] "RemoveContainer" containerID="4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285402 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbz5l\" (UniqueName: \"kubernetes.io/projected/c24dd570-1c48-407f-bb26-0d85ab367883-kube-api-access-wbz5l\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285465 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-scripts\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285534 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-config-data\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285557 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285633 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285658 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285680 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285731 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-logs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285793 4603 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.285807 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jrk9\" (UniqueName: \"kubernetes.io/projected/de8cce1f-54bd-43ab-ba6d-13d804e9955f-kube-api-access-8jrk9\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.286098 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.312806 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.381703 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de8cce1f-54bd-43ab-ba6d-13d804e9955f" (UID: "de8cce1f-54bd-43ab-ba6d-13d804e9955f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.389263 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.389488 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.389573 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.389987 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-logs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.390112 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbz5l\" (UniqueName: \"kubernetes.io/projected/c24dd570-1c48-407f-bb26-0d85ab367883-kube-api-access-wbz5l\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc 
kubenswrapper[4603]: I0930 20:07:53.390216 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-scripts\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.390327 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-config-data\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.390398 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.390533 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.393751 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-logs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.394049 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c24dd570-1c48-407f-bb26-0d85ab367883-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.394479 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.409056 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.415585 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config" (OuterVolumeSpecName: "config") pod "de8cce1f-54bd-43ab-ba6d-13d804e9955f" (UID: "de8cce1f-54bd-43ab-ba6d-13d804e9955f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.431339 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "de8cce1f-54bd-43ab-ba6d-13d804e9955f" (UID: "de8cce1f-54bd-43ab-ba6d-13d804e9955f"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.432980 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-config-data\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.434674 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-scripts\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.444492 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbz5l\" (UniqueName: \"kubernetes.io/projected/c24dd570-1c48-407f-bb26-0d85ab367883-kube-api-access-wbz5l\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.465895 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c24dd570-1c48-407f-bb26-0d85ab367883-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.492533 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.492650 4603 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8cce1f-54bd-43ab-ba6d-13d804e9955f-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.532452 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c24dd570-1c48-407f-bb26-0d85ab367883\") " pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.548635 4603 scope.go:117] "RemoveContainer" containerID="4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.570489 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.580489 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.586674 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-64bcfc7484-7pbkj"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.634533 4603 scope.go:117] "RemoveContainer" containerID="4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2" Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.634879 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2\": container with ID starting with 4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2 not found: ID does not exist" containerID="4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.634905 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2"} err="failed to get container status \"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2\": rpc error: code = NotFound desc = could not find container \"4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2\": container with ID starting with 4f6f148726453975a6c937a876ac510a596852747d98c89ad2c6916af00d51e2 not found: ID does not exist" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.634925 4603 scope.go:117] "RemoveContainer" containerID="4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f" Sep 30 20:07:53 crc kubenswrapper[4603]: E0930 20:07:53.635088 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f\": container with ID starting with 4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f not found: ID does not exist" containerID="4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.635104 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f"} err="failed to get container status \"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f\": rpc error: code = NotFound desc = could not find container \"4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f\": container with ID starting with 4dd55384211e661ca1c3c17d223ca7fb8466ad95baafecea700d1532eac78a9f not found: ID does not exist" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.730834 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.740673 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.740892 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-log" containerID="cri-o://d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13" gracePeriod=30 Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.741036 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-httpd" containerID="cri-o://e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7" gracePeriod=30 Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.911732 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8fpx\" (UniqueName: \"kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx\") pod \"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2\" (UID: \"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2\") " Sep 30 20:07:53 crc kubenswrapper[4603]: I0930 20:07:53.919319 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx" (OuterVolumeSpecName: "kube-api-access-z8fpx") pod "02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" (UID: "02e7f5a1-3809-4c70-8bfe-ac06acb64ac2"). InnerVolumeSpecName "kube-api-access-z8fpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.018753 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8fpx\" (UniqueName: \"kubernetes.io/projected/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2-kube-api-access-z8fpx\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.201702 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5122-account-create-ps78f" event={"ID":"02e7f5a1-3809-4c70-8bfe-ac06acb64ac2","Type":"ContainerDied","Data":"8de58e0daf3c46b950d515a149331ec634bbb091c7b035f6c8378a6f55c672e6"} Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.202093 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8de58e0daf3c46b950d515a149331ec634bbb091c7b035f6c8378a6f55c672e6" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.201859 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5122-account-create-ps78f" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.368481 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.711992 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.733752 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.754573 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdf5v\" (UniqueName: \"kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v\") pod \"fe01d8e6-6f43-47f3-a31f-e8ecbc536785\" (UID: \"fe01d8e6-6f43-47f3-a31f-e8ecbc536785\") " Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.810303 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d09701f3-eb4c-4104-a482-d85b4f3f89a7" path="/var/lib/kubelet/pods/d09701f3-eb4c-4104-a482-d85b4f3f89a7/volumes" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.811537 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de8cce1f-54bd-43ab-ba6d-13d804e9955f" path="/var/lib/kubelet/pods/de8cce1f-54bd-43ab-ba6d-13d804e9955f/volumes" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.821013 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v" (OuterVolumeSpecName: "kube-api-access-vdf5v") pod "fe01d8e6-6f43-47f3-a31f-e8ecbc536785" (UID: "fe01d8e6-6f43-47f3-a31f-e8ecbc536785"). InnerVolumeSpecName "kube-api-access-vdf5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.858094 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pnv5\" (UniqueName: \"kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5\") pod \"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63\" (UID: \"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63\") " Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.858811 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdf5v\" (UniqueName: \"kubernetes.io/projected/fe01d8e6-6f43-47f3-a31f-e8ecbc536785-kube-api-access-vdf5v\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.866656 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5" (OuterVolumeSpecName: "kube-api-access-4pnv5") pod "d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" (UID: "d0e45af4-c8e3-4dbd-94f3-3742b31b6e63"). InnerVolumeSpecName "kube-api-access-4pnv5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:54 crc kubenswrapper[4603]: I0930 20:07:54.960253 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pnv5\" (UniqueName: \"kubernetes.io/projected/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63-kube-api-access-4pnv5\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.224436 4603 generic.go:334] "Generic (PLEG): container finished" podID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerID="d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13" exitCode=143 Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.224520 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerDied","Data":"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13"} Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.233625 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cba5-account-create-mr2xq" event={"ID":"fe01d8e6-6f43-47f3-a31f-e8ecbc536785","Type":"ContainerDied","Data":"cbdacf6f23f35ed37c0b2ac0a16b0128c6f4584d3f9c71fa16c98ad6fd8b3dab"} Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.233830 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbdacf6f23f35ed37c0b2ac0a16b0128c6f4584d3f9c71fa16c98ad6fd8b3dab" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.233914 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cba5-account-create-mr2xq" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.238944 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3142-account-create-2kf4s" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.239678 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3142-account-create-2kf4s" event={"ID":"d0e45af4-c8e3-4dbd-94f3-3742b31b6e63","Type":"ContainerDied","Data":"e4940a9d2d28dec125492107de39f7260c8de0f359b89b4eb0c6e00cad0f1205"} Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.239720 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4940a9d2d28dec125492107de39f7260c8de0f359b89b4eb0c6e00cad0f1205" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.271676 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-central-agent" containerID="cri-o://50e4b2b7daf50ed713f7078fef6b873bc88fbd0d447c99b88cebd9f8c9922299" gracePeriod=30 Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.272039 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerStarted","Data":"ede8d95883968e7141d05c7ff515c69c10ec9b3db508ff8bc7181af8d7ef8e65"} Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.272080 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.272399 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="proxy-httpd" containerID="cri-o://ede8d95883968e7141d05c7ff515c69c10ec9b3db508ff8bc7181af8d7ef8e65" gracePeriod=30 Sep 30 
20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.272479 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="sg-core" containerID="cri-o://e0295986fbc6eb370202e52c7c60c7997f6c035f59c0629c77dca09e3986c90c" gracePeriod=30 Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.272549 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-notification-agent" containerID="cri-o://6ec57fd41e181ab7402dbc2defdfd3ac64b13acaadca1ce9353ce89c0f9e8dbe" gracePeriod=30 Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.294825 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c24dd570-1c48-407f-bb26-0d85ab367883","Type":"ContainerStarted","Data":"e158618a8eefc5f501197f5ca5605738af696cdb049e156f51167aadd6b3f9cf"} Sep 30 20:07:55 crc kubenswrapper[4603]: I0930 20:07:55.297156 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.845445642 podStartE2EDuration="7.297140224s" podCreationTimestamp="2025-09-30 20:07:48 +0000 UTC" firstStartedPulling="2025-09-30 20:07:49.810125691 +0000 UTC m=+1271.748584509" lastFinishedPulling="2025-09-30 20:07:54.261820273 +0000 UTC m=+1276.200279091" observedRunningTime="2025-09-30 20:07:55.294308717 +0000 UTC m=+1277.232767535" watchObservedRunningTime="2025-09-30 20:07:55.297140224 +0000 UTC m=+1277.235599042" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.246840 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.247222 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.248001 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f"} pod="openstack/horizon-8575cd6744-wt57f" containerMessage="Container horizon failed startup probe, will be restarted" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.248044 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" containerID="cri-o://6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f" gracePeriod=30 Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.272338 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.272402 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.273041 4603 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"d571ab44a2d7f3b1f6275257c24c9ca29163fb980cc93a1fc37877a7a81e79af"} pod="openstack/horizon-7cc565dc7d-zt9pz" containerMessage="Container horizon failed startup probe, will be restarted" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.273236 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" containerID="cri-o://d571ab44a2d7f3b1f6275257c24c9ca29163fb980cc93a1fc37877a7a81e79af" gracePeriod=30 Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.307532 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9pth"] Sep 30 20:07:56 crc kubenswrapper[4603]: E0930 20:07:56.307903 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.307923 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: E0930 20:07:56.307942 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe01d8e6-6f43-47f3-a31f-e8ecbc536785" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.307950 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe01d8e6-6f43-47f3-a31f-e8ecbc536785" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: E0930 20:07:56.307984 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.307990 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.308179 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.308198 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe01d8e6-6f43-47f3-a31f-e8ecbc536785" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.308220 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" containerName="mariadb-account-create" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.315621 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.320045 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.320264 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.320483 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8ms7h" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322224 4603 generic.go:334] "Generic (PLEG): container finished" podID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerID="ede8d95883968e7141d05c7ff515c69c10ec9b3db508ff8bc7181af8d7ef8e65" exitCode=0 Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322251 4603 generic.go:334] "Generic (PLEG): container finished" podID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerID="e0295986fbc6eb370202e52c7c60c7997f6c035f59c0629c77dca09e3986c90c" exitCode=2 Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322259 4603 generic.go:334] "Generic (PLEG): container finished" podID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerID="6ec57fd41e181ab7402dbc2defdfd3ac64b13acaadca1ce9353ce89c0f9e8dbe" exitCode=0 Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322301 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerDied","Data":"ede8d95883968e7141d05c7ff515c69c10ec9b3db508ff8bc7181af8d7ef8e65"} Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322330 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerDied","Data":"e0295986fbc6eb370202e52c7c60c7997f6c035f59c0629c77dca09e3986c90c"} Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.322339 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerDied","Data":"6ec57fd41e181ab7402dbc2defdfd3ac64b13acaadca1ce9353ce89c0f9e8dbe"} Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.332074 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c24dd570-1c48-407f-bb26-0d85ab367883","Type":"ContainerStarted","Data":"c30d8f882638aaed97f331d25d9e7769b62e075d9f33b83c780df5eafb89ffec"} Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.332117 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c24dd570-1c48-407f-bb26-0d85ab367883","Type":"ContainerStarted","Data":"3390fcf3fc093365fc52ed1fde4ff7450a5800ee755305083edef56e31c83400"} Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.332747 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9pth"] Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.383946 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.383927318 podStartE2EDuration="3.383927318s" podCreationTimestamp="2025-09-30 20:07:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:07:56.377017788 +0000 UTC m=+1278.315476606" 
watchObservedRunningTime="2025-09-30 20:07:56.383927318 +0000 UTC m=+1278.322386136" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.389915 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njlnm\" (UniqueName: \"kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.389964 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.389999 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.390020 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.491374 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njlnm\" (UniqueName: \"kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.491430 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.491461 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.491488 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.497128 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.498359 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.499072 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.515687 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njlnm\" (UniqueName: \"kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm\") pod \"nova-cell0-conductor-db-sync-c9pth\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:56 crc kubenswrapper[4603]: I0930 20:07:56.655550 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:07:57 crc kubenswrapper[4603]: I0930 20:07:57.162302 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9pth"] Sep 30 20:07:57 crc kubenswrapper[4603]: I0930 20:07:57.171776 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:07:57 crc kubenswrapper[4603]: I0930 20:07:57.341841 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9pth" event={"ID":"f2e05975-2977-47a5-9881-2b0996dfc973","Type":"ContainerStarted","Data":"0d2b0e6d150459325071b36e874bbf331b01408ccdd25376b854018c8cf25ff6"} Sep 30 20:07:58 crc kubenswrapper[4603]: I0930 20:07:58.885274 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039599 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039709 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mt8b\" (UniqueName: \"kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039767 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039791 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039820 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039849 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039942 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.039982 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle\") pod \"9b9cb86d-897c-445a-bdc1-96aee550d17d\" (UID: \"9b9cb86d-897c-445a-bdc1-96aee550d17d\") " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.041349 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.046967 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.051024 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b" (OuterVolumeSpecName: "kube-api-access-2mt8b") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "kube-api-access-2mt8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.056459 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs" (OuterVolumeSpecName: "logs") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.060047 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts" (OuterVolumeSpecName: "scripts") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.124243 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147913 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147933 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147958 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147968 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mt8b\" (UniqueName: \"kubernetes.io/projected/9b9cb86d-897c-445a-bdc1-96aee550d17d-kube-api-access-2mt8b\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147977 4603 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b9cb86d-897c-445a-bdc1-96aee550d17d-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.147984 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.154577 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data" (OuterVolumeSpecName: "config-data") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.163546 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9b9cb86d-897c-445a-bdc1-96aee550d17d" (UID: "9b9cb86d-897c-445a-bdc1-96aee550d17d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.187883 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.249898 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.249982 4603 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.249997 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9cb86d-897c-445a-bdc1-96aee550d17d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.372758 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerDied","Data":"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7"} Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.372778 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.372836 4603 scope.go:117] "RemoveContainer" containerID="e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.373064 4603 generic.go:334] "Generic (PLEG): container finished" podID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerID="e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7" exitCode=0 Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.373097 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b9cb86d-897c-445a-bdc1-96aee550d17d","Type":"ContainerDied","Data":"cb2aeefd75959497e63e3ac0f13b2ee27a7d10a8ae16ff1db181191ac73c664f"} Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.406788 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.419898 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.427052 4603 scope.go:117] "RemoveContainer" containerID="d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.454675 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:07:59 crc kubenswrapper[4603]: E0930 20:07:59.463484 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-log" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.463513 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-log" Sep 30 20:07:59 crc kubenswrapper[4603]: E0930 20:07:59.463578 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" 
containerName="glance-httpd" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.463584 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-httpd" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.463837 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-log" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.463852 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" containerName="glance-httpd" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.466785 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.466884 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.472554 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.476849 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.542614 4603 scope.go:117] "RemoveContainer" containerID="e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7" Sep 30 20:07:59 crc kubenswrapper[4603]: E0930 20:07:59.555059 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7\": container with ID starting with e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7 not found: ID does not exist" containerID="e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.555099 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7"} err="failed to get container status \"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7\": rpc error: code = NotFound desc = could not find container \"e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7\": container with ID starting with e171e91e4bb94e9eb95edfc2347d830c19994575b05b12aa8470dab4850c37e7 not found: ID does not exist" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.555124 4603 scope.go:117] "RemoveContainer" containerID="d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556030 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556095 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc 
kubenswrapper[4603]: I0930 20:07:59.556119 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556149 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556193 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svxkf\" (UniqueName: \"kubernetes.io/projected/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-kube-api-access-svxkf\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556215 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556244 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.556266 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: E0930 20:07:59.558792 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13\": container with ID starting with d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13 not found: ID does not exist" containerID="d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.558826 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13"} err="failed to get container status \"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13\": rpc error: code = NotFound desc = could not find container \"d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13\": container with ID starting with d091ada7fb41eb147ed6e20fb0816f84a6663ebc74b709d3669da48136f27b13 not found: ID does not exist" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.659922 4603 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660048 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660073 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660144 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660202 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svxkf\" (UniqueName: \"kubernetes.io/projected/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-kube-api-access-svxkf\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660235 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660279 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.660302 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.661258 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.663644 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.664336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.666355 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.670043 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.675522 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.686952 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.697416 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.702900 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svxkf\" (UniqueName: \"kubernetes.io/projected/60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7-kube-api-access-svxkf\") pod \"glance-default-internal-api-0\" (UID: \"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:07:59 crc kubenswrapper[4603]: I0930 20:07:59.851683 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:00 crc kubenswrapper[4603]: I0930 20:08:00.530890 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:08:00 crc kubenswrapper[4603]: W0930 20:08:00.548290 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60324d02_1dfc_4bb9_b4c6_227bf3c3e3b7.slice/crio-273c5711b86de9ab045f1a78f39bf306162686496a729d3f24778ef03b68adf3 WatchSource:0}: Error finding container 273c5711b86de9ab045f1a78f39bf306162686496a729d3f24778ef03b68adf3: Status 404 returned error can't find the container with id 273c5711b86de9ab045f1a78f39bf306162686496a729d3f24778ef03b68adf3 Sep 30 20:08:00 crc kubenswrapper[4603]: I0930 20:08:00.793454 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b9cb86d-897c-445a-bdc1-96aee550d17d" path="/var/lib/kubelet/pods/9b9cb86d-897c-445a-bdc1-96aee550d17d/volumes" Sep 30 20:08:01 crc kubenswrapper[4603]: I0930 20:08:01.415360 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7","Type":"ContainerStarted","Data":"af62ad6d8a76d27501a521fb97e7ed7463acd98193c45bf94c935bd4517c6ffe"} Sep 30 20:08:01 crc kubenswrapper[4603]: I0930 20:08:01.415554 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7","Type":"ContainerStarted","Data":"273c5711b86de9ab045f1a78f39bf306162686496a729d3f24778ef03b68adf3"} Sep 30 20:08:02 crc kubenswrapper[4603]: I0930 20:08:02.433143 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7","Type":"ContainerStarted","Data":"48c819ee3a5727fa41e24a85b8bd6ce5b1640e73b34ef4a2fc02e35825525600"} Sep 30 20:08:02 crc kubenswrapper[4603]: I0930 20:08:02.457407 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.457391224 podStartE2EDuration="3.457391224s" podCreationTimestamp="2025-09-30 20:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:02.453123977 +0000 UTC m=+1284.391582795" watchObservedRunningTime="2025-09-30 20:08:02.457391224 +0000 UTC m=+1284.395850042" Sep 30 20:08:03 crc kubenswrapper[4603]: I0930 20:08:03.583431 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:08:03 crc kubenswrapper[4603]: I0930 20:08:03.583492 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:08:03 crc kubenswrapper[4603]: I0930 20:08:03.633237 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:08:03 crc kubenswrapper[4603]: I0930 20:08:03.658992 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:08:04 crc kubenswrapper[4603]: I0930 20:08:04.455583 4603 generic.go:334] "Generic (PLEG): container finished" podID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerID="50e4b2b7daf50ed713f7078fef6b873bc88fbd0d447c99b88cebd9f8c9922299" exitCode=0 Sep 30 20:08:04 
crc kubenswrapper[4603]: I0930 20:08:04.455663 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerDied","Data":"50e4b2b7daf50ed713f7078fef6b873bc88fbd0d447c99b88cebd9f8c9922299"} Sep 30 20:08:04 crc kubenswrapper[4603]: I0930 20:08:04.456670 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:08:04 crc kubenswrapper[4603]: I0930 20:08:04.456933 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:08:06 crc kubenswrapper[4603]: I0930 20:08:06.475853 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:08:06 crc kubenswrapper[4603]: I0930 20:08:06.477110 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:08:07 crc kubenswrapper[4603]: I0930 20:08:07.322262 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:08:07 crc kubenswrapper[4603]: I0930 20:08:07.368407 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:08:08 crc kubenswrapper[4603]: I0930 20:08:08.441200 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:08:08 crc kubenswrapper[4603]: I0930 20:08:08.441485 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:08:08 crc kubenswrapper[4603]: I0930 20:08:08.441518 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:08:08 crc kubenswrapper[4603]: I0930 20:08:08.442103 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:08:08 crc kubenswrapper[4603]: I0930 20:08:08.442145 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156" gracePeriod=600 Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.536815 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156" exitCode=0 Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.536899 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" 
event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156"} Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.537181 4603 scope.go:117] "RemoveContainer" containerID="dc95dc69b8a8b292c75e23592495a77e80c4146bdc34d0d432b4703d42c315a7" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.793899 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.852833 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.852887 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.894857 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.904467 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.967802 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.967866 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.967929 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.967956 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.968065 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.968134 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.968219 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjqr9\" (UniqueName: 
\"kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9\") pod \"88062f88-19dd-4f17-993e-b8dd17d4983c\" (UID: \"88062f88-19dd-4f17-993e-b8dd17d4983c\") " Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.968296 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.968912 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.969281 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.979482 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts" (OuterVolumeSpecName: "scripts") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:09 crc kubenswrapper[4603]: I0930 20:08:09.994640 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9" (OuterVolumeSpecName: "kube-api-access-rjqr9") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "kube-api-access-rjqr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.009290 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.072248 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88062f88-19dd-4f17-993e-b8dd17d4983c-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.072281 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.072290 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.072300 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjqr9\" (UniqueName: \"kubernetes.io/projected/88062f88-19dd-4f17-993e-b8dd17d4983c-kube-api-access-rjqr9\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.112853 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.139080 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data" (OuterVolumeSpecName: "config-data") pod "88062f88-19dd-4f17-993e-b8dd17d4983c" (UID: "88062f88-19dd-4f17-993e-b8dd17d4983c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.174252 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.174289 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88062f88-19dd-4f17-993e-b8dd17d4983c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.554433 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9pth" event={"ID":"f2e05975-2977-47a5-9881-2b0996dfc973","Type":"ContainerStarted","Data":"3b1fe09d26a9e478bf47b6644e1f5446d71d7104e5e50bd861f418d41cf03dde"} Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.559612 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"88062f88-19dd-4f17-993e-b8dd17d4983c","Type":"ContainerDied","Data":"676f1dd1b37ea3024a4dff5a70d6de0a90531f83fb9fd26a44ccc3b03f3af502"} Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.559654 4603 scope.go:117] "RemoveContainer" containerID="ede8d95883968e7141d05c7ff515c69c10ec9b3db508ff8bc7181af8d7ef8e65" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.559670 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.577575 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576"} Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.578576 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.578603 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.586821 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-c9pth" podStartSLOduration=1.759139507 podStartE2EDuration="14.586803401s" podCreationTimestamp="2025-09-30 20:07:56 +0000 UTC" firstStartedPulling="2025-09-30 20:07:57.171574052 +0000 UTC m=+1279.110032870" lastFinishedPulling="2025-09-30 20:08:09.999237956 +0000 UTC m=+1291.937696764" observedRunningTime="2025-09-30 20:08:10.582633726 +0000 UTC m=+1292.521092534" watchObservedRunningTime="2025-09-30 20:08:10.586803401 +0000 UTC m=+1292.525262219" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.617406 4603 scope.go:117] "RemoveContainer" containerID="e0295986fbc6eb370202e52c7c60c7997f6c035f59c0629c77dca09e3986c90c" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.646675 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.648300 4603 scope.go:117] "RemoveContainer" containerID="6ec57fd41e181ab7402dbc2defdfd3ac64b13acaadca1ce9353ce89c0f9e8dbe" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.666243 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.670932 4603 scope.go:117] "RemoveContainer" containerID="50e4b2b7daf50ed713f7078fef6b873bc88fbd0d447c99b88cebd9f8c9922299" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.689249 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:10 crc kubenswrapper[4603]: E0930 20:08:10.689980 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="sg-core" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.691181 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="sg-core" Sep 30 20:08:10 crc kubenswrapper[4603]: E0930 20:08:10.691293 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="proxy-httpd" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.691381 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="proxy-httpd" Sep 30 20:08:10 crc kubenswrapper[4603]: E0930 20:08:10.691478 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-central-agent" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.691551 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-central-agent" Sep 30 
20:08:10 crc kubenswrapper[4603]: E0930 20:08:10.691649 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-notification-agent" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.691736 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-notification-agent" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.692037 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-notification-agent" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.692176 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="proxy-httpd" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.692293 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="sg-core" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.692375 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" containerName="ceilometer-central-agent" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.694549 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.696126 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.705303 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.705504 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.773702 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88062f88-19dd-4f17-993e-b8dd17d4983c" path="/var/lib/kubelet/pods/88062f88-19dd-4f17-993e-b8dd17d4983c/volumes" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793015 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793069 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793300 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793347 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data\") pod \"ceilometer-0\" (UID: 
\"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793392 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793530 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.793654 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxrmv\" (UniqueName: \"kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895521 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895642 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxrmv\" (UniqueName: \"kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895716 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895757 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895855 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895914 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.895999 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.897562 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.897742 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.903876 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.904081 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.904139 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.904396 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:10 crc kubenswrapper[4603]: I0930 20:08:10.924700 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxrmv\" (UniqueName: \"kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv\") pod \"ceilometer-0\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " pod="openstack/ceilometer-0" Sep 30 20:08:11 crc kubenswrapper[4603]: I0930 20:08:11.018522 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:11 crc kubenswrapper[4603]: I0930 20:08:11.479383 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:11 crc kubenswrapper[4603]: W0930 20:08:11.482843 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7a6516e_bba5_425d_9772_76eca64356af.slice/crio-e09bc265b4a707859915ed25de1b88cae5966807ededbd1e6c5eeb796eca5a33 WatchSource:0}: Error finding container e09bc265b4a707859915ed25de1b88cae5966807ededbd1e6c5eeb796eca5a33: Status 404 returned error can't find the container with id e09bc265b4a707859915ed25de1b88cae5966807ededbd1e6c5eeb796eca5a33 Sep 30 20:08:11 crc kubenswrapper[4603]: I0930 20:08:11.604956 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerStarted","Data":"e09bc265b4a707859915ed25de1b88cae5966807ededbd1e6c5eeb796eca5a33"} Sep 30 20:08:12 crc kubenswrapper[4603]: I0930 20:08:12.851229 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:12 crc kubenswrapper[4603]: I0930 20:08:12.851777 4603 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:08:13 crc kubenswrapper[4603]: I0930 20:08:13.000992 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:08:13 crc kubenswrapper[4603]: I0930 20:08:13.623643 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerStarted","Data":"6e809cf3af4c2feca444a3a83bfbdb62baebcd275a836482f55cf6b30fe6f210"} Sep 30 20:08:14 crc kubenswrapper[4603]: I0930 20:08:14.634426 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerStarted","Data":"1604b46be6d8edba6407dbc787c08e3faa60b271510398aecc742d96dbdea246"} Sep 30 20:08:14 crc kubenswrapper[4603]: I0930 20:08:14.634953 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerStarted","Data":"d00d91f71e5d99763fe347c026053169a935e68481d7e76fc55bca15e5da7d0f"} Sep 30 20:08:16 crc kubenswrapper[4603]: I0930 20:08:16.656861 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerStarted","Data":"814e53e14aa3b0230df52940125ea1de393c8a64cf9b9e804329ea518df71b89"} Sep 30 20:08:16 crc kubenswrapper[4603]: I0930 20:08:16.658392 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.778890 4603 generic.go:334] "Generic (PLEG): container finished" podID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerID="6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f" exitCode=137 Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.778990 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerDied","Data":"6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f"} Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.779546 4603 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerStarted","Data":"9e154543257947fee23e9ce76eec503f256afbb70115d93c4c6b2cc1ce92634e"} Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.779576 4603 scope.go:117] "RemoveContainer" containerID="ce86089b80e43e015f6bfb070428ab4d12acdf7966b46025bf2a1688f12d3e96" Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.783503 4603 generic.go:334] "Generic (PLEG): container finished" podID="53799743-167b-4a74-9cab-3e591a04391b" containerID="d571ab44a2d7f3b1f6275257c24c9ca29163fb980cc93a1fc37877a7a81e79af" exitCode=137 Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.783555 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerDied","Data":"d571ab44a2d7f3b1f6275257c24c9ca29163fb980cc93a1fc37877a7a81e79af"} Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.783587 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cc565dc7d-zt9pz" event={"ID":"53799743-167b-4a74-9cab-3e591a04391b","Type":"ContainerStarted","Data":"65031f94512f1df2326b1ab454efa98a3334327d0febe2e1303aaaf78d1425f9"} Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.824934 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=12.525029386 podStartE2EDuration="16.824910812s" podCreationTimestamp="2025-09-30 20:08:10 +0000 UTC" firstStartedPulling="2025-09-30 20:08:11.485389289 +0000 UTC m=+1293.423848107" lastFinishedPulling="2025-09-30 20:08:15.785270715 +0000 UTC m=+1297.723729533" observedRunningTime="2025-09-30 20:08:16.692488171 +0000 UTC m=+1298.630946999" watchObservedRunningTime="2025-09-30 20:08:26.824910812 +0000 UTC m=+1308.763369650" Sep 30 20:08:26 crc kubenswrapper[4603]: I0930 20:08:26.968945 4603 scope.go:117] "RemoveContainer" containerID="f2df7b791500b2f88eba69b711fc888a443c4f3d2f45d56ba30b839df9e7d7fd" Sep 30 20:08:28 crc kubenswrapper[4603]: I0930 20:08:28.809557 4603 generic.go:334] "Generic (PLEG): container finished" podID="f2e05975-2977-47a5-9881-2b0996dfc973" containerID="3b1fe09d26a9e478bf47b6644e1f5446d71d7104e5e50bd861f418d41cf03dde" exitCode=0 Sep 30 20:08:28 crc kubenswrapper[4603]: I0930 20:08:28.809713 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9pth" event={"ID":"f2e05975-2977-47a5-9881-2b0996dfc973","Type":"ContainerDied","Data":"3b1fe09d26a9e478bf47b6644e1f5446d71d7104e5e50bd861f418d41cf03dde"} Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.222236 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.368694 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle\") pod \"f2e05975-2977-47a5-9881-2b0996dfc973\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.368809 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts\") pod \"f2e05975-2977-47a5-9881-2b0996dfc973\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.368890 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njlnm\" (UniqueName: \"kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm\") pod \"f2e05975-2977-47a5-9881-2b0996dfc973\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.368946 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data\") pod \"f2e05975-2977-47a5-9881-2b0996dfc973\" (UID: \"f2e05975-2977-47a5-9881-2b0996dfc973\") " Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.374400 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts" (OuterVolumeSpecName: "scripts") pod "f2e05975-2977-47a5-9881-2b0996dfc973" (UID: "f2e05975-2977-47a5-9881-2b0996dfc973"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.389370 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm" (OuterVolumeSpecName: "kube-api-access-njlnm") pod "f2e05975-2977-47a5-9881-2b0996dfc973" (UID: "f2e05975-2977-47a5-9881-2b0996dfc973"). InnerVolumeSpecName "kube-api-access-njlnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.399500 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data" (OuterVolumeSpecName: "config-data") pod "f2e05975-2977-47a5-9881-2b0996dfc973" (UID: "f2e05975-2977-47a5-9881-2b0996dfc973"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.404009 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2e05975-2977-47a5-9881-2b0996dfc973" (UID: "f2e05975-2977-47a5-9881-2b0996dfc973"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.471334 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.472007 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.472080 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njlnm\" (UniqueName: \"kubernetes.io/projected/f2e05975-2977-47a5-9881-2b0996dfc973-kube-api-access-njlnm\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.472144 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2e05975-2977-47a5-9881-2b0996dfc973-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.837685 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9pth" event={"ID":"f2e05975-2977-47a5-9881-2b0996dfc973","Type":"ContainerDied","Data":"0d2b0e6d150459325071b36e874bbf331b01408ccdd25376b854018c8cf25ff6"} Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.838043 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d2b0e6d150459325071b36e874bbf331b01408ccdd25376b854018c8cf25ff6" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.837761 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9pth" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.969442 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:08:30 crc kubenswrapper[4603]: E0930 20:08:30.970198 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2e05975-2977-47a5-9881-2b0996dfc973" containerName="nova-cell0-conductor-db-sync" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.970218 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2e05975-2977-47a5-9881-2b0996dfc973" containerName="nova-cell0-conductor-db-sync" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.970506 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2e05975-2977-47a5-9881-2b0996dfc973" containerName="nova-cell0-conductor-db-sync" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.971235 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.973386 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8ms7h" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.982900 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 20:08:30 crc kubenswrapper[4603]: I0930 20:08:30.993563 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.082390 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.082571 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.082629 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmr27\" (UniqueName: \"kubernetes.io/projected/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-kube-api-access-pmr27\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.184425 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.185392 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.185528 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmr27\" (UniqueName: \"kubernetes.io/projected/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-kube-api-access-pmr27\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.194116 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.200930 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.212671 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmr27\" (UniqueName: \"kubernetes.io/projected/6f9d13ef-768e-47e0-aa2d-f21e801a8e3b-kube-api-access-pmr27\") pod \"nova-cell0-conductor-0\" (UID: \"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.320351 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.780376 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:08:31 crc kubenswrapper[4603]: I0930 20:08:31.851406 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b","Type":"ContainerStarted","Data":"3fc67a7bcc6469ae1aa517aae7da6295b837a15f206b8aef26f428b0bc4a59aa"} Sep 30 20:08:32 crc kubenswrapper[4603]: I0930 20:08:32.864793 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6f9d13ef-768e-47e0-aa2d-f21e801a8e3b","Type":"ContainerStarted","Data":"d1be3acd2c478903bec5439b4ec1ca7670db639366e439a86e34c54cd276d1a3"} Sep 30 20:08:32 crc kubenswrapper[4603]: I0930 20:08:32.865247 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:32 crc kubenswrapper[4603]: I0930 20:08:32.885974 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.885959259 podStartE2EDuration="2.885959259s" podCreationTimestamp="2025-09-30 20:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:32.885706012 +0000 UTC m=+1314.824164830" watchObservedRunningTime="2025-09-30 20:08:32.885959259 +0000 UTC m=+1314.824418067" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.247066 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.248317 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.248496 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.271943 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.272047 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.274522 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get 
\"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.375788 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.988290 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-f6hwp"] Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.989356 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.991975 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 20:08:36 crc kubenswrapper[4603]: I0930 20:08:36.998055 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.004046 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6hwp"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.018522 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.018659 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.018983 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.019027 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwjnm\" (UniqueName: \"kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.120828 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.120911 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " 
pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.120944 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwjnm\" (UniqueName: \"kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.121330 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.126961 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.127666 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.132460 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.158772 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwjnm\" (UniqueName: \"kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm\") pod \"nova-cell0-cell-mapping-f6hwp\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.206068 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.227746 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.242501 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.242683 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.268669 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.270082 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.292616 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.299237 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.311016 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.333897 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-948ph\" (UniqueName: \"kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.333978 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.352524 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.352604 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r9hn\" (UniqueName: \"kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.352656 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.352763 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.352900 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.440935 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.442540 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.454934 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.455769 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-948ph\" (UniqueName: \"kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.455937 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.456058 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.456178 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r9hn\" (UniqueName: \"kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.456290 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.456469 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.457585 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.462031 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.463281 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.476448 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.478956 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.480253 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.497274 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.530436 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-948ph\" (UniqueName: \"kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph\") pod \"nova-scheduler-0\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.571692 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r9hn\" (UniqueName: \"kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn\") pod \"nova-metadata-0\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.572251 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.573424 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.573443 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.573474 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.573523 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k2lw\" (UniqueName: \"kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.603407 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.604926 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.608189 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.614353 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.615800 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.615945 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.659239 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675364 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675430 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k2lw\" (UniqueName: \"kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675453 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns6qq\" (UniqueName: \"kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675480 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675519 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675550 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675574 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675590 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5982k\" (UniqueName: \"kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675612 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675643 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675679 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675693 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.675726 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.676039 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.686963 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.691776 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.702308 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k2lw\" (UniqueName: \"kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw\") pod \"nova-api-0\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " pod="openstack/nova-api-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.702911 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776759 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns6qq\" (UniqueName: 
\"kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776806 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776850 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776882 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776912 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776938 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5982k\" (UniqueName: \"kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776959 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.776991 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.777045 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.779975 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config\") pod 
\"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.780583 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.781089 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.786527 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.787410 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.788127 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.795290 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.808075 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns6qq\" (UniqueName: \"kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq\") pod \"nova-cell1-novncproxy-0\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.810920 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5982k\" (UniqueName: \"kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k\") pod \"dnsmasq-dns-865f5d856f-xvn9n\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:37 crc kubenswrapper[4603]: I0930 20:08:37.925282 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:37.991822 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.004148 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.138658 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.206486 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.271730 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6hwp"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.649234 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:08:38 crc kubenswrapper[4603]: W0930 20:08:38.674030 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb49ca741_a549_4ac3_bdc4_90b59a52692a.slice/crio-4fc50e073671f337685a9714ae760fdb878644344d3ebd1c3e4632c1b7c123d5 WatchSource:0}: Error finding container 4fc50e073671f337685a9714ae760fdb878644344d3ebd1c3e4632c1b7c123d5: Status 404 returned error can't find the container with id 4fc50e073671f337685a9714ae760fdb878644344d3ebd1c3e4632c1b7c123d5 Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.743237 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7j69x"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.744467 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.754078 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7j69x"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.755509 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.755789 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.797039 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.821001 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.821042 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.821152 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qq7f\" (UniqueName: 
\"kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.821213 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.923030 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.923086 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.923218 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.923280 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qq7f\" (UniqueName: \"kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.923349 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.930131 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.930899 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.932433 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x" 
Sep 30 20:08:38 crc kubenswrapper[4603]: I0930 20:08:38.939743 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qq7f\" (UniqueName: \"kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f\") pod \"nova-cell1-conductor-db-sync-7j69x\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " pod="openstack/nova-cell1-conductor-db-sync-7j69x"
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.012438 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerStarted","Data":"596a15290e5d7a30cf2c71e03a175c6aa64966f5010ff4e7e3ebaea81f006821"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.015010 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b49ca741-a549-4ac3-bdc4-90b59a52692a","Type":"ContainerStarted","Data":"4fc50e073671f337685a9714ae760fdb878644344d3ebd1c3e4632c1b7c123d5"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.020501 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" event={"ID":"af4c3ff4-bc2f-47f8-8bd2-f074eb888943","Type":"ContainerStarted","Data":"afcf12aa1d8cada8bb2caec341559b29de412f84aecd7f820782d13228d9d3d1"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.030075 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6hwp" event={"ID":"8983ec27-7bbe-4844-a826-9a7ce168e605","Type":"ContainerStarted","Data":"11968f3e62894d2d51facf2e5f013b5bb7a5ef9b63bfcf80db93b19386ac53b7"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.030358 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6hwp" event={"ID":"8983ec27-7bbe-4844-a826-9a7ce168e605","Type":"ContainerStarted","Data":"60dbb6c3b236479027f9a7c816085dd7838fc0294ee560786d300c824b61653b"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.044932 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9bcf0dc8-f669-44d5-a72f-824e5f3329dc","Type":"ContainerStarted","Data":"59b801e1e37bc726bbe370cf6305675b49fc1c85044e7de9ca4f81e5170892b2"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.052107 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-f6hwp" podStartSLOduration=3.052082328 podStartE2EDuration="3.052082328s" podCreationTimestamp="2025-09-30 20:08:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:39.050387021 +0000 UTC m=+1320.988845839" watchObservedRunningTime="2025-09-30 20:08:39.052082328 +0000 UTC m=+1320.990541146"
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.053970 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerStarted","Data":"3f1da5d721eacd2c210b2fede65a3de2591d1e90d1f75701a1bac9cb13dd6a26"}
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.096954 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7j69x"
Sep 30 20:08:39 crc kubenswrapper[4603]: I0930 20:08:39.751683 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7j69x"]
Sep 30 20:08:39 crc kubenswrapper[4603]: W0930 20:08:39.766269 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9336de02_cccd_4f98_a9b1_472833b48fca.slice/crio-72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae WatchSource:0}: Error finding container 72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae: Status 404 returned error can't find the container with id 72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae
Sep 30 20:08:40 crc kubenswrapper[4603]: I0930 20:08:40.066011 4603 generic.go:334] "Generic (PLEG): container finished" podID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerID="5abaa07b4ed6714d046bc1e21a13131057564bd7f6518aedda47d8ee9523eb1f" exitCode=0
Sep 30 20:08:40 crc kubenswrapper[4603]: I0930 20:08:40.066081 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" event={"ID":"af4c3ff4-bc2f-47f8-8bd2-f074eb888943","Type":"ContainerDied","Data":"5abaa07b4ed6714d046bc1e21a13131057564bd7f6518aedda47d8ee9523eb1f"}
Sep 30 20:08:40 crc kubenswrapper[4603]: I0930 20:08:40.071279 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7j69x" event={"ID":"9336de02-cccd-4f98-a9b1-472833b48fca","Type":"ContainerStarted","Data":"f34e580619c851a1dd914b19fd9ef2e253f2fc43e51f7273612341ab1b9f05d8"}
Sep 30 20:08:40 crc kubenswrapper[4603]: I0930 20:08:40.071453 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7j69x" event={"ID":"9336de02-cccd-4f98-a9b1-472833b48fca","Type":"ContainerStarted","Data":"72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae"}
Sep 30 20:08:40 crc kubenswrapper[4603]: I0930 20:08:40.125666 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-7j69x" podStartSLOduration=2.125651298 podStartE2EDuration="2.125651298s" podCreationTimestamp="2025-09-30 20:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:40.12388507 +0000 UTC m=+1322.062343888" watchObservedRunningTime="2025-09-30 20:08:40.125651298 +0000 UTC m=+1322.064110116"
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.028380 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.108913 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" event={"ID":"af4c3ff4-bc2f-47f8-8bd2-f074eb888943","Type":"ContainerStarted","Data":"2a5cae4640a91cbeac4e709e92f8ea738a5939cf9336230d90eaf508fafcce52"}
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.109368 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n"
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.129994 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" podStartSLOduration=4.129980719 podStartE2EDuration="4.129980719s" podCreationTimestamp="2025-09-30 20:08:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:41.125006732 +0000 UTC m=+1323.063465550" watchObservedRunningTime="2025-09-30 20:08:41.129980719 +0000 UTC m=+1323.068439537"
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.470282 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:08:41 crc kubenswrapper[4603]: I0930 20:08:41.486028 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.150881 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9bcf0dc8-f669-44d5-a72f-824e5f3329dc","Type":"ContainerStarted","Data":"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.155008 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerStarted","Data":"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.155048 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerStarted","Data":"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.155188 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-log" containerID="cri-o://84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" gracePeriod=30
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.156321 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-metadata" containerID="cri-o://be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" gracePeriod=30
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.162131 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerStarted","Data":"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.162527 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerStarted","Data":"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.168109 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b49ca741-a549-4ac3-bdc4-90b59a52692a","Type":"ContainerStarted","Data":"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc"}
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.168265 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b49ca741-a549-4ac3-bdc4-90b59a52692a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc" gracePeriod=30
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.210742 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.849243463 podStartE2EDuration="8.210726882s" podCreationTimestamp="2025-09-30 20:08:37 +0000 UTC" firstStartedPulling="2025-09-30 20:08:38.285344397 +0000 UTC m=+1320.223803215" lastFinishedPulling="2025-09-30 20:08:43.646827816 +0000 UTC m=+1325.585286634" observedRunningTime="2025-09-30 20:08:45.206436545 +0000 UTC m=+1327.144895363" watchObservedRunningTime="2025-09-30 20:08:45.210726882 +0000 UTC m=+1327.149185700"
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.216389 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.801364309 podStartE2EDuration="8.216373737s" podCreationTimestamp="2025-09-30 20:08:37 +0000 UTC" firstStartedPulling="2025-09-30 20:08:38.231276133 +0000 UTC m=+1320.169734951" lastFinishedPulling="2025-09-30 20:08:43.646285561 +0000 UTC m=+1325.584744379" observedRunningTime="2025-09-30 20:08:45.188352888 +0000 UTC m=+1327.126811716" watchObservedRunningTime="2025-09-30 20:08:45.216373737 +0000 UTC m=+1327.154832555"
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.234022 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.372459072 podStartE2EDuration="8.234004471s" podCreationTimestamp="2025-09-30 20:08:37 +0000 UTC" firstStartedPulling="2025-09-30 20:08:38.785200664 +0000 UTC m=+1320.723659482" lastFinishedPulling="2025-09-30 20:08:43.646746063 +0000 UTC m=+1325.585204881" observedRunningTime="2025-09-30 20:08:45.228070258 +0000 UTC m=+1327.166529076" watchObservedRunningTime="2025-09-30 20:08:45.234004471 +0000 UTC m=+1327.172463289"
Sep 30 20:08:45 crc kubenswrapper[4603]: I0930 20:08:45.254869 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.265658651 podStartE2EDuration="8.254852063s" podCreationTimestamp="2025-09-30 20:08:37 +0000 UTC" firstStartedPulling="2025-09-30 20:08:38.677436957 +0000 UTC m=+1320.615895775" lastFinishedPulling="2025-09-30 20:08:43.666630369 +0000 UTC m=+1325.605089187" observedRunningTime="2025-09-30 20:08:45.243905132 +0000 UTC m=+1327.182363950" watchObservedRunningTime="2025-09-30 20:08:45.254852063 +0000 UTC m=+1327.193310881"
Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.105295 4603 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182380 4603 generic.go:334] "Generic (PLEG): container finished" podID="59b20933-c293-4b5e-829b-53263ab166ab" containerID="be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" exitCode=0 Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182411 4603 generic.go:334] "Generic (PLEG): container finished" podID="59b20933-c293-4b5e-829b-53263ab166ab" containerID="84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" exitCode=143 Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182743 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerDied","Data":"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7"} Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182772 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerDied","Data":"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790"} Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182782 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"59b20933-c293-4b5e-829b-53263ab166ab","Type":"ContainerDied","Data":"3f1da5d721eacd2c210b2fede65a3de2591d1e90d1f75701a1bac9cb13dd6a26"} Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.182797 4603 scope.go:117] "RemoveContainer" containerID="be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.183531 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.210028 4603 scope.go:117] "RemoveContainer" containerID="84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.230684 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data\") pod \"59b20933-c293-4b5e-829b-53263ab166ab\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.230755 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle\") pod \"59b20933-c293-4b5e-829b-53263ab166ab\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.230820 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs\") pod \"59b20933-c293-4b5e-829b-53263ab166ab\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.231294 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r9hn\" (UniqueName: \"kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn\") pod \"59b20933-c293-4b5e-829b-53263ab166ab\" (UID: \"59b20933-c293-4b5e-829b-53263ab166ab\") " Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.234425 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs" (OuterVolumeSpecName: "logs") pod "59b20933-c293-4b5e-829b-53263ab166ab" (UID: "59b20933-c293-4b5e-829b-53263ab166ab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.239816 4603 scope.go:117] "RemoveContainer" containerID="be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" Sep 30 20:08:46 crc kubenswrapper[4603]: E0930 20:08:46.246908 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7\": container with ID starting with be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7 not found: ID does not exist" containerID="be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.246968 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7"} err="failed to get container status \"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7\": rpc error: code = NotFound desc = could not find container \"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7\": container with ID starting with be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7 not found: ID does not exist" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.246992 4603 scope.go:117] "RemoveContainer" containerID="84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.247117 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:08:46 crc kubenswrapper[4603]: E0930 20:08:46.248184 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790\": container with ID starting with 84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790 not found: ID does not exist" containerID="84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.248207 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790"} err="failed to get container status \"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790\": rpc error: code = NotFound desc = could not find container \"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790\": container with ID starting with 84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790 not found: ID does not exist" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.248224 4603 scope.go:117] "RemoveContainer" containerID="be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.248752 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7"} 
err="failed to get container status \"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7\": rpc error: code = NotFound desc = could not find container \"be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7\": container with ID starting with be89163a162a40d849a3a45ec9d5b23dac80e8c850cbe77a04b31e65881860e7 not found: ID does not exist" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.248785 4603 scope.go:117] "RemoveContainer" containerID="84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.249157 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790"} err="failed to get container status \"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790\": rpc error: code = NotFound desc = could not find container \"84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790\": container with ID starting with 84477f54bf7befcb6ec97dc43ddd0811adcd1ce08b26bb93dbd3936e4ad83790 not found: ID does not exist" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.254582 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn" (OuterVolumeSpecName: "kube-api-access-4r9hn") pod "59b20933-c293-4b5e-829b-53263ab166ab" (UID: "59b20933-c293-4b5e-829b-53263ab166ab"). InnerVolumeSpecName "kube-api-access-4r9hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.258655 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59b20933-c293-4b5e-829b-53263ab166ab" (UID: "59b20933-c293-4b5e-829b-53263ab166ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.266245 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data" (OuterVolumeSpecName: "config-data") pod "59b20933-c293-4b5e-829b-53263ab166ab" (UID: "59b20933-c293-4b5e-829b-53263ab166ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.281339 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7cc565dc7d-zt9pz" podUID="53799743-167b-4a74-9cab-3e591a04391b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.336537 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59b20933-c293-4b5e-829b-53263ab166ab-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.336584 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r9hn\" (UniqueName: \"kubernetes.io/projected/59b20933-c293-4b5e-829b-53263ab166ab-kube-api-access-4r9hn\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.336600 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.336613 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b20933-c293-4b5e-829b-53263ab166ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.517512 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.530866 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.538541 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:46 crc kubenswrapper[4603]: E0930 20:08:46.538970 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-metadata" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.538986 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-metadata" Sep 30 20:08:46 crc kubenswrapper[4603]: E0930 20:08:46.539014 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-log" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.539021 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-log" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.539224 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-log" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.539245 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b20933-c293-4b5e-829b-53263ab166ab" containerName="nova-metadata-metadata" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.540241 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.542284 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.542527 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.556204 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.640916 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.640972 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.641012 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.641050 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.641079 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9lfl\" (UniqueName: \"kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.742824 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.742877 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.742910 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 
20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.742950 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.742978 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9lfl\" (UniqueName: \"kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.743408 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.750389 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.750739 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.751657 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.761597 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9lfl\" (UniqueName: \"kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl\") pod \"nova-metadata-0\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " pod="openstack/nova-metadata-0" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.777651 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b20933-c293-4b5e-829b-53263ab166ab" path="/var/lib/kubelet/pods/59b20933-c293-4b5e-829b-53263ab166ab/volumes" Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.803416 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.803620 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f6e219d6-424d-4f85-8506-d6a0a69ae998" containerName="kube-state-metrics" containerID="cri-o://ae9625328bae508cee82254b087a19e5ea9b803c804dac2b667ce2cc8ca584bd" gracePeriod=30 Sep 30 20:08:46 crc kubenswrapper[4603]: I0930 20:08:46.855039 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.220576 4603 generic.go:334] "Generic (PLEG): container finished" podID="f6e219d6-424d-4f85-8506-d6a0a69ae998" containerID="ae9625328bae508cee82254b087a19e5ea9b803c804dac2b667ce2cc8ca584bd" exitCode=2 Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.221091 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6e219d6-424d-4f85-8506-d6a0a69ae998","Type":"ContainerDied","Data":"ae9625328bae508cee82254b087a19e5ea9b803c804dac2b667ce2cc8ca584bd"} Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.419734 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.513217 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.558535 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8jgn\" (UniqueName: \"kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn\") pod \"f6e219d6-424d-4f85-8506-d6a0a69ae998\" (UID: \"f6e219d6-424d-4f85-8506-d6a0a69ae998\") " Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.570497 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn" (OuterVolumeSpecName: "kube-api-access-f8jgn") pod "f6e219d6-424d-4f85-8506-d6a0a69ae998" (UID: "f6e219d6-424d-4f85-8506-d6a0a69ae998"). InnerVolumeSpecName "kube-api-access-f8jgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.573202 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.573292 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.642274 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.660592 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8jgn\" (UniqueName: \"kubernetes.io/projected/f6e219d6-424d-4f85-8506-d6a0a69ae998-kube-api-access-f8jgn\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.927370 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.927609 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:08:47 crc kubenswrapper[4603]: I0930 20:08:47.996576 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.005523 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.057031 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.057540 4603 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="dnsmasq-dns" containerID="cri-o://44f46618a98a0c6690a31d3937af06c9b94f5ba8ed144bc4f8380f78d38767f0" gracePeriod=10 Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.242854 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerStarted","Data":"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492"} Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.242890 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerStarted","Data":"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137"} Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.242900 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerStarted","Data":"21c8c7f2f7448f122c74c02f6a16d29cb1ce6066321eb60208a0662b57eff312"} Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.255698 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6e219d6-424d-4f85-8506-d6a0a69ae998","Type":"ContainerDied","Data":"b741463f933e040a7114e61a87d45f1040c6dcccac2b5387e940f6980ef103e1"} Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.255746 4603 scope.go:117] "RemoveContainer" containerID="ae9625328bae508cee82254b087a19e5ea9b803c804dac2b667ce2cc8ca584bd" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.255883 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.270524 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.270503749 podStartE2EDuration="2.270503749s" podCreationTimestamp="2025-09-30 20:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:48.265330237 +0000 UTC m=+1330.203789055" watchObservedRunningTime="2025-09-30 20:08:48.270503749 +0000 UTC m=+1330.208962567" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.301390 4603 generic.go:334] "Generic (PLEG): container finished" podID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerID="44f46618a98a0c6690a31d3937af06c9b94f5ba8ed144bc4f8380f78d38767f0" exitCode=0 Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.301602 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" event={"ID":"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb","Type":"ContainerDied","Data":"44f46618a98a0c6690a31d3937af06c9b94f5ba8ed144bc4f8380f78d38767f0"} Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.349717 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.357663 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.388285 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.424863 4603 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:48 crc kubenswrapper[4603]: E0930 20:08:48.425295 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e219d6-424d-4f85-8506-d6a0a69ae998" containerName="kube-state-metrics" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.425308 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e219d6-424d-4f85-8506-d6a0a69ae998" containerName="kube-state-metrics" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.425507 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e219d6-424d-4f85-8506-d6a0a69ae998" containerName="kube-state-metrics" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.426085 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.429119 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.429976 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.476016 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.485843 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.485919 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4624r\" (UniqueName: \"kubernetes.io/projected/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-api-access-4624r\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.485956 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.486000 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.587205 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.587277 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4624r\" (UniqueName: 
\"kubernetes.io/projected/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-api-access-4624r\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.587318 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.587361 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.598912 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.605888 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.617142 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.617424 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4624r\" (UniqueName: \"kubernetes.io/projected/96ef0530-4c62-4ebf-b58d-59284fcdcad0-kube-api-access-4624r\") pod \"kube-state-metrics-0\" (UID: \"96ef0530-4c62-4ebf-b58d-59284fcdcad0\") " pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.707527 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.789884 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.790018 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.790123 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42dql\" (UniqueName: \"kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.790194 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.790214 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.790263 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config\") pod \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\" (UID: \"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb\") " Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.791470 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.812316 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql" (OuterVolumeSpecName: "kube-api-access-42dql") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "kube-api-access-42dql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.879934 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e219d6-424d-4f85-8506-d6a0a69ae998" path="/var/lib/kubelet/pods/f6e219d6-424d-4f85-8506-d6a0a69ae998/volumes" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.922549 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42dql\" (UniqueName: \"kubernetes.io/projected/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-kube-api-access-42dql\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.923117 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config" (OuterVolumeSpecName: "config") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.927529 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.927833 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.972742 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:08:48 crc kubenswrapper[4603]: I0930 20:08:48.980442 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.001679 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.024619 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.024745 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.024813 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.024868 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.051587 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" (UID: "8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.126662 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.317353 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.318559 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" event={"ID":"8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb","Type":"ContainerDied","Data":"446e68de17900efa1a43f499bc2bcca7692a857390b4ee00683905b10cd4cc9d"} Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.318606 4603 scope.go:117] "RemoveContainer" containerID="44f46618a98a0c6690a31d3937af06c9b94f5ba8ed144bc4f8380f78d38767f0" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.388077 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.394096 4603 scope.go:117] "RemoveContainer" containerID="b56f0400d2ca5cc30d639d5f4f8c1d426e4195c798cf7198b87f9509cacb3ec8" Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.400440 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-4m74r"] Sep 30 20:08:49 crc kubenswrapper[4603]: I0930 20:08:49.439842 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.346499 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"96ef0530-4c62-4ebf-b58d-59284fcdcad0","Type":"ContainerStarted","Data":"de765d6b7e4649923cc5c5c2e8d391b50a664caf7c94208f4c5552037bcc01a3"} Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.346917 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.346933 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"96ef0530-4c62-4ebf-b58d-59284fcdcad0","Type":"ContainerStarted","Data":"3e81eb8ec77cfe751ad09d8db25c4f67eb5b01770225f145c7eb7b68e781f28f"} Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.375563 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.977654697 podStartE2EDuration="2.375545166s" podCreationTimestamp="2025-09-30 20:08:48 +0000 UTC" firstStartedPulling="2025-09-30 20:08:49.448103365 +0000 UTC m=+1331.386562183" lastFinishedPulling="2025-09-30 20:08:49.845993834 +0000 UTC m=+1331.784452652" observedRunningTime="2025-09-30 20:08:50.37022195 +0000 UTC m=+1332.308680768" watchObservedRunningTime="2025-09-30 20:08:50.375545166 +0000 UTC m=+1332.314003984" Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.775526 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" path="/var/lib/kubelet/pods/8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb/volumes" Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.786632 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.786898 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-central-agent" containerID="cri-o://6e809cf3af4c2feca444a3a83bfbdb62baebcd275a836482f55cf6b30fe6f210" gracePeriod=30 Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.787025 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="proxy-httpd" containerID="cri-o://814e53e14aa3b0230df52940125ea1de393c8a64cf9b9e804329ea518df71b89" gracePeriod=30 Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.787062 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="sg-core" containerID="cri-o://1604b46be6d8edba6407dbc787c08e3faa60b271510398aecc742d96dbdea246" gracePeriod=30 Sep 30 20:08:50 crc kubenswrapper[4603]: I0930 20:08:50.787095 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-notification-agent" containerID="cri-o://d00d91f71e5d99763fe347c026053169a935e68481d7e76fc55bca15e5da7d0f" gracePeriod=30 Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.366709 4603 generic.go:334] "Generic (PLEG): container finished" podID="a7a6516e-bba5-425d-9772-76eca64356af" containerID="814e53e14aa3b0230df52940125ea1de393c8a64cf9b9e804329ea518df71b89" exitCode=0 Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.367016 4603 generic.go:334] "Generic (PLEG): container finished" podID="a7a6516e-bba5-425d-9772-76eca64356af" containerID="1604b46be6d8edba6407dbc787c08e3faa60b271510398aecc742d96dbdea246" exitCode=2 Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.367024 4603 generic.go:334] "Generic (PLEG): container finished" podID="a7a6516e-bba5-425d-9772-76eca64356af" containerID="6e809cf3af4c2feca444a3a83bfbdb62baebcd275a836482f55cf6b30fe6f210" exitCode=0 Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.366797 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerDied","Data":"814e53e14aa3b0230df52940125ea1de393c8a64cf9b9e804329ea518df71b89"} Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.367314 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerDied","Data":"1604b46be6d8edba6407dbc787c08e3faa60b271510398aecc742d96dbdea246"} Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.367332 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerDied","Data":"6e809cf3af4c2feca444a3a83bfbdb62baebcd275a836482f55cf6b30fe6f210"} Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.855519 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:08:51 crc kubenswrapper[4603]: I0930 20:08:51.855563 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:08:52 crc kubenswrapper[4603]: I0930 20:08:52.378632 4603 generic.go:334] "Generic (PLEG): container finished" podID="8983ec27-7bbe-4844-a826-9a7ce168e605" containerID="11968f3e62894d2d51facf2e5f013b5bb7a5ef9b63bfcf80db93b19386ac53b7" exitCode=0 Sep 30 20:08:52 crc kubenswrapper[4603]: I0930 20:08:52.379452 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6hwp" event={"ID":"8983ec27-7bbe-4844-a826-9a7ce168e605","Type":"ContainerDied","Data":"11968f3e62894d2d51facf2e5f013b5bb7a5ef9b63bfcf80db93b19386ac53b7"} Sep 30 20:08:52 crc kubenswrapper[4603]: I0930 20:08:52.380875 4603 generic.go:334] "Generic (PLEG): container finished" 
podID="9336de02-cccd-4f98-a9b1-472833b48fca" containerID="f34e580619c851a1dd914b19fd9ef2e253f2fc43e51f7273612341ab1b9f05d8" exitCode=0 Sep 30 20:08:52 crc kubenswrapper[4603]: I0930 20:08:52.380919 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7j69x" event={"ID":"9336de02-cccd-4f98-a9b1-472833b48fca","Type":"ContainerDied","Data":"f34e580619c851a1dd914b19fd9ef2e253f2fc43e51f7273612341ab1b9f05d8"} Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.471773 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bb4fc677f-4m74r" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.166:5353: i/o timeout" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.733701 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.835193 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data\") pod \"8983ec27-7bbe-4844-a826-9a7ce168e605\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.835291 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwjnm\" (UniqueName: \"kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm\") pod \"8983ec27-7bbe-4844-a826-9a7ce168e605\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.835360 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle\") pod \"8983ec27-7bbe-4844-a826-9a7ce168e605\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.835422 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts\") pod \"8983ec27-7bbe-4844-a826-9a7ce168e605\" (UID: \"8983ec27-7bbe-4844-a826-9a7ce168e605\") " Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.845820 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm" (OuterVolumeSpecName: "kube-api-access-cwjnm") pod "8983ec27-7bbe-4844-a826-9a7ce168e605" (UID: "8983ec27-7bbe-4844-a826-9a7ce168e605"). InnerVolumeSpecName "kube-api-access-cwjnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.846598 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts" (OuterVolumeSpecName: "scripts") pod "8983ec27-7bbe-4844-a826-9a7ce168e605" (UID: "8983ec27-7bbe-4844-a826-9a7ce168e605"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.866694 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data" (OuterVolumeSpecName: "config-data") pod "8983ec27-7bbe-4844-a826-9a7ce168e605" (UID: "8983ec27-7bbe-4844-a826-9a7ce168e605"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.867739 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8983ec27-7bbe-4844-a826-9a7ce168e605" (UID: "8983ec27-7bbe-4844-a826-9a7ce168e605"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.909865 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.937628 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.937655 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwjnm\" (UniqueName: \"kubernetes.io/projected/8983ec27-7bbe-4844-a826-9a7ce168e605-kube-api-access-cwjnm\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.937665 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:53 crc kubenswrapper[4603]: I0930 20:08:53.937673 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8983ec27-7bbe-4844-a826-9a7ce168e605-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.041545 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle\") pod \"9336de02-cccd-4f98-a9b1-472833b48fca\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.041639 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qq7f\" (UniqueName: \"kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f\") pod \"9336de02-cccd-4f98-a9b1-472833b48fca\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.041665 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts\") pod \"9336de02-cccd-4f98-a9b1-472833b48fca\" (UID: \"9336de02-cccd-4f98-a9b1-472833b48fca\") " Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.041764 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data\") pod \"9336de02-cccd-4f98-a9b1-472833b48fca\" (UID: 
\"9336de02-cccd-4f98-a9b1-472833b48fca\") " Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.054634 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts" (OuterVolumeSpecName: "scripts") pod "9336de02-cccd-4f98-a9b1-472833b48fca" (UID: "9336de02-cccd-4f98-a9b1-472833b48fca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.058977 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f" (OuterVolumeSpecName: "kube-api-access-2qq7f") pod "9336de02-cccd-4f98-a9b1-472833b48fca" (UID: "9336de02-cccd-4f98-a9b1-472833b48fca"). InnerVolumeSpecName "kube-api-access-2qq7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.085175 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data" (OuterVolumeSpecName: "config-data") pod "9336de02-cccd-4f98-a9b1-472833b48fca" (UID: "9336de02-cccd-4f98-a9b1-472833b48fca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.108308 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9336de02-cccd-4f98-a9b1-472833b48fca" (UID: "9336de02-cccd-4f98-a9b1-472833b48fca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.143696 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.143743 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.143761 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qq7f\" (UniqueName: \"kubernetes.io/projected/9336de02-cccd-4f98-a9b1-472833b48fca-kube-api-access-2qq7f\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.143774 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9336de02-cccd-4f98-a9b1-472833b48fca-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.417252 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6hwp" event={"ID":"8983ec27-7bbe-4844-a826-9a7ce168e605","Type":"ContainerDied","Data":"60dbb6c3b236479027f9a7c816085dd7838fc0294ee560786d300c824b61653b"} Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.417600 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60dbb6c3b236479027f9a7c816085dd7838fc0294ee560786d300c824b61653b" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.417668 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6hwp" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.423455 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7j69x" event={"ID":"9336de02-cccd-4f98-a9b1-472833b48fca","Type":"ContainerDied","Data":"72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae"} Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.423511 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72d4653c6f6b40c752b59860cc987d5658443977e2d1b6e07cc743def57cbcae" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.423597 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7j69x" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.511513 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:08:54 crc kubenswrapper[4603]: E0930 20:08:54.511907 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="init" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.511922 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="init" Sep 30 20:08:54 crc kubenswrapper[4603]: E0930 20:08:54.511948 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="dnsmasq-dns" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.511956 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="dnsmasq-dns" Sep 30 20:08:54 crc kubenswrapper[4603]: E0930 20:08:54.511968 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8983ec27-7bbe-4844-a826-9a7ce168e605" containerName="nova-manage" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.511976 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8983ec27-7bbe-4844-a826-9a7ce168e605" containerName="nova-manage" Sep 30 20:08:54 crc kubenswrapper[4603]: E0930 20:08:54.511991 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9336de02-cccd-4f98-a9b1-472833b48fca" containerName="nova-cell1-conductor-db-sync" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.511999 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9336de02-cccd-4f98-a9b1-472833b48fca" containerName="nova-cell1-conductor-db-sync" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.512234 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9336de02-cccd-4f98-a9b1-472833b48fca" containerName="nova-cell1-conductor-db-sync" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.512257 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="8983ec27-7bbe-4844-a826-9a7ce168e605" containerName="nova-manage" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.512278 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ee85c3b-76ee-4f0a-a337-3a7ac2c147bb" containerName="dnsmasq-dns" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.512914 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.522385 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.527216 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.652730 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.652820 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.652928 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghlgh\" (UniqueName: \"kubernetes.io/projected/090bfcb2-0286-4e66-a22f-d79a55de8ff8-kube-api-access-ghlgh\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.701838 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.702090 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-log" containerID="cri-o://8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e" gracePeriod=30 Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.702202 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-api" containerID="cri-o://442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e" gracePeriod=30 Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.721690 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.721918 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerName="nova-scheduler-scheduler" containerID="cri-o://2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" gracePeriod=30 Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.732643 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.732866 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-log" containerID="cri-o://187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" gracePeriod=30 Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.733031 4603 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/nova-metadata-0" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-metadata" containerID="cri-o://7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" gracePeriod=30 Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.754294 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.754367 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.754462 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghlgh\" (UniqueName: \"kubernetes.io/projected/090bfcb2-0286-4e66-a22f-d79a55de8ff8-kube-api-access-ghlgh\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.760475 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.762940 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090bfcb2-0286-4e66-a22f-d79a55de8ff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.805020 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghlgh\" (UniqueName: \"kubernetes.io/projected/090bfcb2-0286-4e66-a22f-d79a55de8ff8-kube-api-access-ghlgh\") pod \"nova-cell1-conductor-0\" (UID: \"090bfcb2-0286-4e66-a22f-d79a55de8ff8\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:54 crc kubenswrapper[4603]: I0930 20:08:54.851895 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.370890 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:08:55 crc kubenswrapper[4603]: W0930 20:08:55.373757 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod090bfcb2_0286_4e66_a22f_d79a55de8ff8.slice/crio-920f76f753d67af7edae1fa79faacc53426bf9fc19c6327be37b967346e1af46 WatchSource:0}: Error finding container 920f76f753d67af7edae1fa79faacc53426bf9fc19c6327be37b967346e1af46: Status 404 returned error can't find the container with id 920f76f753d67af7edae1fa79faacc53426bf9fc19c6327be37b967346e1af46 Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.399101 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.445929 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"090bfcb2-0286-4e66-a22f-d79a55de8ff8","Type":"ContainerStarted","Data":"920f76f753d67af7edae1fa79faacc53426bf9fc19c6327be37b967346e1af46"} Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.450480 4603 generic.go:334] "Generic (PLEG): container finished" podID="d876cf74-916c-481f-8a01-7b2f8410e258" containerID="8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e" exitCode=143 Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.450532 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerDied","Data":"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e"} Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.466182 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9lfl\" (UniqueName: \"kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl\") pod \"91dc1594-c718-4e37-b43c-77ab0a16b378\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.466239 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs\") pod \"91dc1594-c718-4e37-b43c-77ab0a16b378\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.466290 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle\") pod \"91dc1594-c718-4e37-b43c-77ab0a16b378\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.466343 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data\") pod \"91dc1594-c718-4e37-b43c-77ab0a16b378\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.466480 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs\") pod \"91dc1594-c718-4e37-b43c-77ab0a16b378\" (UID: \"91dc1594-c718-4e37-b43c-77ab0a16b378\") " Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469532 4603 generic.go:334] "Generic (PLEG): container finished" podID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerID="7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" exitCode=0 Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469747 4603 generic.go:334] "Generic (PLEG): container finished" podID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerID="187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" exitCode=143 Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469767 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerDied","Data":"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492"} Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 
20:08:55.469795 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerDied","Data":"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137"} Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469806 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"91dc1594-c718-4e37-b43c-77ab0a16b378","Type":"ContainerDied","Data":"21c8c7f2f7448f122c74c02f6a16d29cb1ce6066321eb60208a0662b57eff312"} Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469828 4603 scope.go:117] "RemoveContainer" containerID="7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.469980 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.473777 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs" (OuterVolumeSpecName: "logs") pod "91dc1594-c718-4e37-b43c-77ab0a16b378" (UID: "91dc1594-c718-4e37-b43c-77ab0a16b378"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.491756 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl" (OuterVolumeSpecName: "kube-api-access-k9lfl") pod "91dc1594-c718-4e37-b43c-77ab0a16b378" (UID: "91dc1594-c718-4e37-b43c-77ab0a16b378"). InnerVolumeSpecName "kube-api-access-k9lfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.516419 4603 scope.go:117] "RemoveContainer" containerID="187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.541137 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "91dc1594-c718-4e37-b43c-77ab0a16b378" (UID: "91dc1594-c718-4e37-b43c-77ab0a16b378"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.552079 4603 scope.go:117] "RemoveContainer" containerID="7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" Sep 30 20:08:55 crc kubenswrapper[4603]: E0930 20:08:55.552815 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492\": container with ID starting with 7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492 not found: ID does not exist" containerID="7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.552874 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492"} err="failed to get container status \"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492\": rpc error: code = NotFound desc = could not find container \"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492\": container with ID starting with 7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492 not found: ID does not exist" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.552902 4603 scope.go:117] "RemoveContainer" containerID="187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" Sep 30 20:08:55 crc kubenswrapper[4603]: E0930 20:08:55.554917 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137\": container with ID starting with 187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137 not found: ID does not exist" containerID="187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.554947 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137"} err="failed to get container status \"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137\": rpc error: code = NotFound desc = could not find container \"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137\": container with ID starting with 187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137 not found: ID does not exist" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.554961 4603 scope.go:117] "RemoveContainer" containerID="7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.555314 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492"} err="failed to get container status \"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492\": rpc error: code = NotFound desc = could not find container \"7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492\": container with ID starting with 7cdac4faeea82db5efd2441d36385126f386d6fb41b0ca518f246556b99d4492 not found: ID does not exist" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.555332 4603 scope.go:117] "RemoveContainer" containerID="187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.555607 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137"} err="failed to get container status \"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137\": rpc error: code = NotFound desc = could not find container \"187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137\": container with ID starting with 187336b8e96587a73a8b2edd6de59f44fb44444d41e2f1e5ae3fb8b62c86f137 not found: ID does not exist" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.570974 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9lfl\" (UniqueName: \"kubernetes.io/projected/91dc1594-c718-4e37-b43c-77ab0a16b378-kube-api-access-k9lfl\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.571004 4603 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.571044 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91dc1594-c718-4e37-b43c-77ab0a16b378-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.574906 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data" (OuterVolumeSpecName: "config-data") pod "91dc1594-c718-4e37-b43c-77ab0a16b378" (UID: "91dc1594-c718-4e37-b43c-77ab0a16b378"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.575673 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91dc1594-c718-4e37-b43c-77ab0a16b378" (UID: "91dc1594-c718-4e37-b43c-77ab0a16b378"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.672229 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.672258 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dc1594-c718-4e37-b43c-77ab0a16b378-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.807749 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.818369 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.830655 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:55 crc kubenswrapper[4603]: E0930 20:08:55.831296 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-metadata" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.831324 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-metadata" Sep 30 20:08:55 crc kubenswrapper[4603]: E0930 20:08:55.831366 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-log" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.831377 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-log" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.831623 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-metadata" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.831655 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" containerName="nova-metadata-log" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.832967 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.835575 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.835778 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.852353 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.978867 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.978937 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.978962 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.979243 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n79jn\" (UniqueName: \"kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:55 crc kubenswrapper[4603]: I0930 20:08:55.979383 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.081602 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n79jn\" (UniqueName: \"kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.081689 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.081778 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.081825 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.081853 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.083203 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.086934 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.087142 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.088098 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.109927 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n79jn\" (UniqueName: \"kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn\") pod \"nova-metadata-0\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.149297 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.481676 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"090bfcb2-0286-4e66-a22f-d79a55de8ff8","Type":"ContainerStarted","Data":"6759f966b0af5ad639128bc7a95a1b67a6d0e9a94ba4fd8866c1c90cc0428210"} Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.482300 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.493611 4603 generic.go:334] "Generic (PLEG): container finished" podID="a7a6516e-bba5-425d-9772-76eca64356af" containerID="d00d91f71e5d99763fe347c026053169a935e68481d7e76fc55bca15e5da7d0f" exitCode=0 Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.493650 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerDied","Data":"d00d91f71e5d99763fe347c026053169a935e68481d7e76fc55bca15e5da7d0f"} Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.504692 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.50467146 podStartE2EDuration="2.50467146s" podCreationTimestamp="2025-09-30 20:08:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:56.500642969 +0000 UTC m=+1338.439101787" watchObservedRunningTime="2025-09-30 20:08:56.50467146 +0000 UTC m=+1338.443130278" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.617551 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.774977 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91dc1594-c718-4e37-b43c-77ab0a16b378" path="/var/lib/kubelet/pods/91dc1594-c718-4e37-b43c-77ab0a16b378/volumes" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.778696 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.895761 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.895799 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.895834 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxrmv\" (UniqueName: \"kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.895896 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.895967 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.896081 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.896108 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml\") pod \"a7a6516e-bba5-425d-9772-76eca64356af\" (UID: \"a7a6516e-bba5-425d-9772-76eca64356af\") " Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.896756 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.902434 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.910442 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv" (OuterVolumeSpecName: "kube-api-access-sxrmv") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "kube-api-access-sxrmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.912857 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts" (OuterVolumeSpecName: "scripts") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.961950 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.998505 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.998536 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.998546 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.998555 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxrmv\" (UniqueName: \"kubernetes.io/projected/a7a6516e-bba5-425d-9772-76eca64356af-kube-api-access-sxrmv\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:56 crc kubenswrapper[4603]: I0930 20:08:56.998567 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7a6516e-bba5-425d-9772-76eca64356af-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.034316 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data" (OuterVolumeSpecName: "config-data") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.040212 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7a6516e-bba5-425d-9772-76eca64356af" (UID: "a7a6516e-bba5-425d-9772-76eca64356af"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.100698 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.100728 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a6516e-bba5-425d-9772-76eca64356af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.504418 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerStarted","Data":"2c347a80d788af89639deb110ea77950f792327066536a2fe1518aed5767ccd8"} Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.504470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerStarted","Data":"730758919b3fd9506e0ce7dfbca68a3f0ad27785a36ad6a11b51985aeea59f19"} Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.504485 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerStarted","Data":"86f369f2b9f7e3bb26ec3a926470180d7513ded8b1a2b5b07435ca83a6d8be82"} Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.507502 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7a6516e-bba5-425d-9772-76eca64356af","Type":"ContainerDied","Data":"e09bc265b4a707859915ed25de1b88cae5966807ededbd1e6c5eeb796eca5a33"} Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.507536 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.507556 4603 scope.go:117] "RemoveContainer" containerID="814e53e14aa3b0230df52940125ea1de393c8a64cf9b9e804329ea518df71b89" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.533098 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.5330834810000002 podStartE2EDuration="2.533083481s" podCreationTimestamp="2025-09-30 20:08:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:08:57.532023103 +0000 UTC m=+1339.470481921" watchObservedRunningTime="2025-09-30 20:08:57.533083481 +0000 UTC m=+1339.471542289" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.538310 4603 scope.go:117] "RemoveContainer" containerID="1604b46be6d8edba6407dbc787c08e3faa60b271510398aecc742d96dbdea246" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.552213 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.564054 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.567102 4603 scope.go:117] "RemoveContainer" containerID="d00d91f71e5d99763fe347c026053169a935e68481d7e76fc55bca15e5da7d0f" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.585272 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.586848 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.587297 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-notification-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587311 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-notification-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.587328 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="proxy-httpd" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587333 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="proxy-httpd" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.587358 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="sg-core" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587365 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="sg-core" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.587386 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-central-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587392 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-central-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587569 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-notification-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587586 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="proxy-httpd" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587595 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="ceilometer-central-agent" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.587606 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a6516e-bba5-425d-9772-76eca64356af" containerName="sg-core" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.590396 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.596952 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.599874 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:08:57 crc kubenswrapper[4603]: E0930 20:08:57.599934 4603 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerName="nova-scheduler-scheduler" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.600526 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.600717 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.600887 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.628116 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.643613 4603 scope.go:117] "RemoveContainer" containerID="6e809cf3af4c2feca444a3a83bfbdb62baebcd275a836482f55cf6b30fe6f210" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.709242 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.709472 4603 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.709605 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.709728 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24kbv\" (UniqueName: \"kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.709871 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.710001 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.710131 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.710242 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811605 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811647 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24kbv\" (UniqueName: \"kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811693 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811736 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811763 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811784 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811816 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.811838 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.812261 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.812369 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.815534 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.817572 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.817742 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " 
pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.819015 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.826979 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.852523 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24kbv\" (UniqueName: \"kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv\") pod \"ceilometer-0\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " pod="openstack/ceilometer-0" Sep 30 20:08:57 crc kubenswrapper[4603]: I0930 20:08:57.927991 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.281646 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.469735 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data\") pod \"d876cf74-916c-481f-8a01-7b2f8410e258\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.469845 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k2lw\" (UniqueName: \"kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw\") pod \"d876cf74-916c-481f-8a01-7b2f8410e258\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.469872 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle\") pod \"d876cf74-916c-481f-8a01-7b2f8410e258\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.469958 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs\") pod \"d876cf74-916c-481f-8a01-7b2f8410e258\" (UID: \"d876cf74-916c-481f-8a01-7b2f8410e258\") " Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.472789 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs" (OuterVolumeSpecName: "logs") pod "d876cf74-916c-481f-8a01-7b2f8410e258" (UID: "d876cf74-916c-481f-8a01-7b2f8410e258"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.479392 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw" (OuterVolumeSpecName: "kube-api-access-6k2lw") pod "d876cf74-916c-481f-8a01-7b2f8410e258" (UID: "d876cf74-916c-481f-8a01-7b2f8410e258"). InnerVolumeSpecName "kube-api-access-6k2lw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.506653 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data" (OuterVolumeSpecName: "config-data") pod "d876cf74-916c-481f-8a01-7b2f8410e258" (UID: "d876cf74-916c-481f-8a01-7b2f8410e258"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.508091 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d876cf74-916c-481f-8a01-7b2f8410e258" (UID: "d876cf74-916c-481f-8a01-7b2f8410e258"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.519322 4603 generic.go:334] "Generic (PLEG): container finished" podID="d876cf74-916c-481f-8a01-7b2f8410e258" containerID="442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e" exitCode=0 Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.519378 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerDied","Data":"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e"} Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.519404 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d876cf74-916c-481f-8a01-7b2f8410e258","Type":"ContainerDied","Data":"596a15290e5d7a30cf2c71e03a175c6aa64966f5010ff4e7e3ebaea81f006821"} Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.519420 4603 scope.go:117] "RemoveContainer" containerID="442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.519555 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.554446 4603 scope.go:117] "RemoveContainer" containerID="8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.559543 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.573813 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.575043 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d876cf74-916c-481f-8a01-7b2f8410e258-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.575062 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.575073 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k2lw\" (UniqueName: \"kubernetes.io/projected/d876cf74-916c-481f-8a01-7b2f8410e258-kube-api-access-6k2lw\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.575081 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d876cf74-916c-481f-8a01-7b2f8410e258-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.598470 4603 scope.go:117] "RemoveContainer" containerID="442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e" Sep 30 20:08:58 crc kubenswrapper[4603]: E0930 20:08:58.602711 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e\": container with ID starting with 442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e not found: ID does not exist" containerID="442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603039 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e"} err="failed to get container status \"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e\": rpc error: code = NotFound desc = could not find container \"442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e\": container with ID starting with 442f39bdc04c116f558a1b41a0109b1e3ee6788762e77c6fac4da17c0f695e8e not found: ID does not exist" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603073 4603 scope.go:117] "RemoveContainer" containerID="8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603201 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:58 crc kubenswrapper[4603]: E0930 20:08:58.603705 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-log" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603723 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-log" Sep 30 20:08:58 crc 
kubenswrapper[4603]: E0930 20:08:58.603754 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-api" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603763 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-api" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.603988 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-log" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.604022 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" containerName="nova-api-api" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.605285 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.608676 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.610547 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:58 crc kubenswrapper[4603]: E0930 20:08:58.611557 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e\": container with ID starting with 8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e not found: ID does not exist" containerID="8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.611938 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e"} err="failed to get container status \"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e\": rpc error: code = NotFound desc = could not find container \"8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e\": container with ID starting with 8d12f472197a654454db68a8a0192239ce3889b8199ce38bdcb4c1642f5c438e not found: ID does not exist" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.618920 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.774578 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7a6516e-bba5-425d-9772-76eca64356af" path="/var/lib/kubelet/pods/a7a6516e-bba5-425d-9772-76eca64356af/volumes" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.775529 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d876cf74-916c-481f-8a01-7b2f8410e258" path="/var/lib/kubelet/pods/d876cf74-916c-481f-8a01-7b2f8410e258/volumes" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.780869 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjddw\" (UniqueName: \"kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.780941 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.781039 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.781062 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.807582 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.883037 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.883159 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.883190 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.883263 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjddw\" (UniqueName: \"kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.883673 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.887370 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.888898 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.900523 4603 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjddw\" (UniqueName: \"kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw\") pod \"nova-api-0\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " pod="openstack/nova-api-0" Sep 30 20:08:58 crc kubenswrapper[4603]: I0930 20:08:58.931923 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.397646 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.411962 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.550837 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerStarted","Data":"ea7f0ee3295e286b2623cd890878e1f340ecfe5e3bd3aac4de1aef82f6b7c9ed"} Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.552874 4603 generic.go:334] "Generic (PLEG): container finished" podID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" exitCode=0 Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.552928 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9bcf0dc8-f669-44d5-a72f-824e5f3329dc","Type":"ContainerDied","Data":"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065"} Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.552947 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.552970 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9bcf0dc8-f669-44d5-a72f-824e5f3329dc","Type":"ContainerDied","Data":"59b801e1e37bc726bbe370cf6305675b49fc1c85044e7de9ca4f81e5170892b2"} Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.552987 4603 scope.go:117] "RemoveContainer" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.556372 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerStarted","Data":"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6"} Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.556399 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerStarted","Data":"a099caf07032314c23a5cd455d26d521a705b7912a5edac5bf0169027ba1d10d"} Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.583394 4603 scope.go:117] "RemoveContainer" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" Sep 30 20:08:59 crc kubenswrapper[4603]: E0930 20:08:59.583781 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065\": container with ID starting with 2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065 not found: ID does not exist" containerID="2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065" Sep 30 20:08:59 crc 
kubenswrapper[4603]: I0930 20:08:59.583810 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065"} err="failed to get container status \"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065\": rpc error: code = NotFound desc = could not find container \"2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065\": container with ID starting with 2546ef14274f89a4307dad18c77b4bf6632d47e11d89d31b09bee48da2788065 not found: ID does not exist" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.596993 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data\") pod \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.597215 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-948ph\" (UniqueName: \"kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph\") pod \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.597676 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle\") pod \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\" (UID: \"9bcf0dc8-f669-44d5-a72f-824e5f3329dc\") " Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.600663 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph" (OuterVolumeSpecName: "kube-api-access-948ph") pod "9bcf0dc8-f669-44d5-a72f-824e5f3329dc" (UID: "9bcf0dc8-f669-44d5-a72f-824e5f3329dc"). InnerVolumeSpecName "kube-api-access-948ph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.634333 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bcf0dc8-f669-44d5-a72f-824e5f3329dc" (UID: "9bcf0dc8-f669-44d5-a72f-824e5f3329dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.635440 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data" (OuterVolumeSpecName: "config-data") pod "9bcf0dc8-f669-44d5-a72f-824e5f3329dc" (UID: "9bcf0dc8-f669-44d5-a72f-824e5f3329dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.674956 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.676365 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.704538 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.704756 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.704793 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-948ph\" (UniqueName: \"kubernetes.io/projected/9bcf0dc8-f669-44d5-a72f-824e5f3329dc-kube-api-access-948ph\") on node \"crc\" DevicePath \"\"" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.891026 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.937088 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.981478 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:08:59 crc kubenswrapper[4603]: E0930 20:08:59.982057 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerName="nova-scheduler-scheduler" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.982082 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerName="nova-scheduler-scheduler" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.982339 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" containerName="nova-scheduler-scheduler" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.983115 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.986604 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 20:08:59 crc kubenswrapper[4603]: I0930 20:08:59.991035 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.113538 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.117438 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qftbw\" (UniqueName: \"kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.117844 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.222384 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.222455 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.222512 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qftbw\" (UniqueName: \"kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.240675 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.262750 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.268279 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qftbw\" (UniqueName: 
\"kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw\") pod \"nova-scheduler-0\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.344539 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.568046 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerStarted","Data":"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3"} Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.570740 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerStarted","Data":"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966"} Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.570778 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerStarted","Data":"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404"} Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.601815 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.601792522 podStartE2EDuration="2.601792522s" podCreationTimestamp="2025-09-30 20:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:00.589433923 +0000 UTC m=+1342.527892751" watchObservedRunningTime="2025-09-30 20:09:00.601792522 +0000 UTC m=+1342.540251340" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.783411 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bcf0dc8-f669-44d5-a72f-824e5f3329dc" path="/var/lib/kubelet/pods/9bcf0dc8-f669-44d5-a72f-824e5f3329dc/volumes" Sep 30 20:09:00 crc kubenswrapper[4603]: I0930 20:09:00.852269 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.149923 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.149999 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.584858 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7e924a-9658-47f8-8c6c-ca0a62758e97","Type":"ContainerStarted","Data":"addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141"} Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.585237 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7e924a-9658-47f8-8c6c-ca0a62758e97","Type":"ContainerStarted","Data":"27b72fe42651dd7af16fdb44454ff8308bd95235af45d5899f5331aea8e4ef7d"} Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.587704 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerStarted","Data":"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7"} Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.602371 4603 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.60235263 podStartE2EDuration="2.60235263s" podCreationTimestamp="2025-09-30 20:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:01.601816234 +0000 UTC m=+1343.540275052" watchObservedRunningTime="2025-09-30 20:09:01.60235263 +0000 UTC m=+1343.540811448" Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.862276 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.908789 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7cc565dc7d-zt9pz" Sep 30 20:09:01 crc kubenswrapper[4603]: I0930 20:09:01.987463 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:09:02 crc kubenswrapper[4603]: I0930 20:09:02.595585 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon-log" containerID="cri-o://8d002b92e3d74d3d7e892e05a13ecb49ba4438c9d0c3d1ad87f0336586cd4f40" gracePeriod=30 Sep 30 20:09:02 crc kubenswrapper[4603]: I0930 20:09:02.595623 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" containerID="cri-o://9e154543257947fee23e9ce76eec503f256afbb70115d93c4c6b2cc1ce92634e" gracePeriod=30 Sep 30 20:09:03 crc kubenswrapper[4603]: I0930 20:09:03.606638 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerStarted","Data":"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9"} Sep 30 20:09:03 crc kubenswrapper[4603]: I0930 20:09:03.606912 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:09:03 crc kubenswrapper[4603]: I0930 20:09:03.641363 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.403804197 podStartE2EDuration="6.641339862s" podCreationTimestamp="2025-09-30 20:08:57 +0000 UTC" firstStartedPulling="2025-09-30 20:08:58.630865017 +0000 UTC m=+1340.569323835" lastFinishedPulling="2025-09-30 20:09:02.868400682 +0000 UTC m=+1344.806859500" observedRunningTime="2025-09-30 20:09:03.638752812 +0000 UTC m=+1345.577211640" watchObservedRunningTime="2025-09-30 20:09:03.641339862 +0000 UTC m=+1345.579798720" Sep 30 20:09:04 crc kubenswrapper[4603]: I0930 20:09:04.897599 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 20:09:05 crc kubenswrapper[4603]: I0930 20:09:05.345495 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.150505 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.150842 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.246681 4603 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.636067 4603 generic.go:334] "Generic (PLEG): container finished" podID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerID="9e154543257947fee23e9ce76eec503f256afbb70115d93c4c6b2cc1ce92634e" exitCode=0 Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.636461 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerDied","Data":"9e154543257947fee23e9ce76eec503f256afbb70115d93c4c6b2cc1ce92634e"} Sep 30 20:09:06 crc kubenswrapper[4603]: I0930 20:09:06.636614 4603 scope.go:117] "RemoveContainer" containerID="6bb1c49431f42d1bd1c1e74568ecaae5529f59ed001930e8cceac3534124407f" Sep 30 20:09:07 crc kubenswrapper[4603]: I0930 20:09:07.163387 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:07 crc kubenswrapper[4603]: I0930 20:09:07.163405 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:08 crc kubenswrapper[4603]: I0930 20:09:08.932324 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:08 crc kubenswrapper[4603]: I0930 20:09:08.932376 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:10 crc kubenswrapper[4603]: I0930 20:09:10.015435 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:10 crc kubenswrapper[4603]: I0930 20:09:10.015448 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:10 crc kubenswrapper[4603]: I0930 20:09:10.346007 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 20:09:10 crc kubenswrapper[4603]: I0930 20:09:10.381106 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 20:09:10 crc kubenswrapper[4603]: I0930 20:09:10.717481 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.636439 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.719584 4603 generic.go:334] "Generic (PLEG): container finished" podID="b49ca741-a549-4ac3-bdc4-90b59a52692a" containerID="9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc" exitCode=137 Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.719624 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b49ca741-a549-4ac3-bdc4-90b59a52692a","Type":"ContainerDied","Data":"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc"} Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.719662 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b49ca741-a549-4ac3-bdc4-90b59a52692a","Type":"ContainerDied","Data":"4fc50e073671f337685a9714ae760fdb878644344d3ebd1c3e4632c1b7c123d5"} Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.719669 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.719681 4603 scope.go:117] "RemoveContainer" containerID="9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.729944 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns6qq\" (UniqueName: \"kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq\") pod \"b49ca741-a549-4ac3-bdc4-90b59a52692a\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.730093 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data\") pod \"b49ca741-a549-4ac3-bdc4-90b59a52692a\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.730228 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle\") pod \"b49ca741-a549-4ac3-bdc4-90b59a52692a\" (UID: \"b49ca741-a549-4ac3-bdc4-90b59a52692a\") " Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.742465 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq" (OuterVolumeSpecName: "kube-api-access-ns6qq") pod "b49ca741-a549-4ac3-bdc4-90b59a52692a" (UID: "b49ca741-a549-4ac3-bdc4-90b59a52692a"). InnerVolumeSpecName "kube-api-access-ns6qq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.742554 4603 scope.go:117] "RemoveContainer" containerID="9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc" Sep 30 20:09:15 crc kubenswrapper[4603]: E0930 20:09:15.742988 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc\": container with ID starting with 9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc not found: ID does not exist" containerID="9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.743015 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc"} err="failed to get container status \"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc\": rpc error: code = NotFound desc = could not find container \"9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc\": container with ID starting with 9350dd32ad07e6243f56be4587afcc5800ea0280b0c89a1932245de8bb9c7bdc not found: ID does not exist" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.767484 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b49ca741-a549-4ac3-bdc4-90b59a52692a" (UID: "b49ca741-a549-4ac3-bdc4-90b59a52692a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.770913 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data" (OuterVolumeSpecName: "config-data") pod "b49ca741-a549-4ac3-bdc4-90b59a52692a" (UID: "b49ca741-a549-4ac3-bdc4-90b59a52692a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.833130 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns6qq\" (UniqueName: \"kubernetes.io/projected/b49ca741-a549-4ac3-bdc4-90b59a52692a-kube-api-access-ns6qq\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.833206 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:15 crc kubenswrapper[4603]: I0930 20:09:15.833226 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b49ca741-a549-4ac3-bdc4-90b59a52692a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.076406 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.084982 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.101451 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:09:16 crc kubenswrapper[4603]: E0930 20:09:16.101797 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b49ca741-a549-4ac3-bdc4-90b59a52692a" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.101814 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b49ca741-a549-4ac3-bdc4-90b59a52692a" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.102006 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="b49ca741-a549-4ac3-bdc4-90b59a52692a" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.102648 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.110552 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.111432 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.112067 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.119004 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.138857 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.138940 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.138976 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.139076 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l77cl\" (UniqueName: \"kubernetes.io/projected/1e631e44-8a31-40d5-8463-cc93716e2a6c-kube-api-access-l77cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.139297 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.161405 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.165762 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.169729 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.240599 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.241117 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.241319 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.241369 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.241553 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l77cl\" (UniqueName: \"kubernetes.io/projected/1e631e44-8a31-40d5-8463-cc93716e2a6c-kube-api-access-l77cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.245205 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.245904 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.246226 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.246642 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.246653 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e631e44-8a31-40d5-8463-cc93716e2a6c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" 
(UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.265016 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l77cl\" (UniqueName: \"kubernetes.io/projected/1e631e44-8a31-40d5-8463-cc93716e2a6c-kube-api-access-l77cl\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e631e44-8a31-40d5-8463-cc93716e2a6c\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.429476 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.738320 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.793086 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b49ca741-a549-4ac3-bdc4-90b59a52692a" path="/var/lib/kubelet/pods/b49ca741-a549-4ac3-bdc4-90b59a52692a/volumes" Sep 30 20:09:16 crc kubenswrapper[4603]: I0930 20:09:16.899545 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:09:16 crc kubenswrapper[4603]: W0930 20:09:16.905275 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e631e44_8a31_40d5_8463_cc93716e2a6c.slice/crio-9971ef3149b89fe26bd9c598a66de607f89408d562599f2df24965d41f41eea2 WatchSource:0}: Error finding container 9971ef3149b89fe26bd9c598a66de607f89408d562599f2df24965d41f41eea2: Status 404 returned error can't find the container with id 9971ef3149b89fe26bd9c598a66de607f89408d562599f2df24965d41f41eea2 Sep 30 20:09:17 crc kubenswrapper[4603]: I0930 20:09:17.743422 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e631e44-8a31-40d5-8463-cc93716e2a6c","Type":"ContainerStarted","Data":"f103e893f5a5068f00e8eb54ad6747b01331cf4ee7ea538692bc560513572150"} Sep 30 20:09:17 crc kubenswrapper[4603]: I0930 20:09:17.743854 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e631e44-8a31-40d5-8463-cc93716e2a6c","Type":"ContainerStarted","Data":"9971ef3149b89fe26bd9c598a66de607f89408d562599f2df24965d41f41eea2"} Sep 30 20:09:17 crc kubenswrapper[4603]: I0930 20:09:17.764779 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.764763446 podStartE2EDuration="1.764763446s" podCreationTimestamp="2025-09-30 20:09:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:17.764261242 +0000 UTC m=+1359.702720060" watchObservedRunningTime="2025-09-30 20:09:17.764763446 +0000 UTC m=+1359.703222274" Sep 30 20:09:18 crc kubenswrapper[4603]: I0930 20:09:18.941207 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:09:18 crc kubenswrapper[4603]: I0930 20:09:18.942138 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:09:18 crc kubenswrapper[4603]: I0930 20:09:18.946770 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:09:18 crc kubenswrapper[4603]: I0930 20:09:18.947394 4603 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:09:19 crc kubenswrapper[4603]: I0930 20:09:19.767196 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:09:19 crc kubenswrapper[4603]: I0930 20:09:19.772970 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.034991 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.048601 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.048742 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143262 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143351 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98qgp\" (UniqueName: \"kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143374 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143412 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143492 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.143509 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245114 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245156 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245251 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245311 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98qgp\" (UniqueName: \"kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245328 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.245366 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.246118 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.246137 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.246452 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.246640 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb\") pod 
\"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.246727 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.270743 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98qgp\" (UniqueName: \"kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp\") pod \"dnsmasq-dns-5c7b6c5df9-pdvdd\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:20 crc kubenswrapper[4603]: I0930 20:09:20.379499 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:21 crc kubenswrapper[4603]: I0930 20:09:20.941572 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:09:21 crc kubenswrapper[4603]: I0930 20:09:21.430124 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:21 crc kubenswrapper[4603]: I0930 20:09:21.793425 4603 generic.go:334] "Generic (PLEG): container finished" podID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerID="91bf0eb5be3de5a9e3d9d7979d6fdc7a1d445dfa4bbe1f08d52961796eeaf6aa" exitCode=0 Sep 30 20:09:21 crc kubenswrapper[4603]: I0930 20:09:21.794734 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" event={"ID":"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c","Type":"ContainerDied","Data":"91bf0eb5be3de5a9e3d9d7979d6fdc7a1d445dfa4bbe1f08d52961796eeaf6aa"} Sep 30 20:09:21 crc kubenswrapper[4603]: I0930 20:09:21.794764 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" event={"ID":"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c","Type":"ContainerStarted","Data":"8fcb234729854fbd247f00069c0a88c699ce44d883fe0bf5386302ba0d358241"} Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.422776 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.651037 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.651466 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-central-agent" containerID="cri-o://535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.652412 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-notification-agent" containerID="cri-o://23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.652485 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="sg-core" 
containerID="cri-o://d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.652647 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="proxy-httpd" containerID="cri-o://fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.755279 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": read tcp 10.217.0.2:43278->10.217.0.195:3000: read: connection reset by peer" Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.805005 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" event={"ID":"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c","Type":"ContainerStarted","Data":"39f4b5b1f6662f6c2b2dc3bd4b72aa50b95bfc41538e57108788a27fcbaa2124"} Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.806491 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.811607 4603 generic.go:334] "Generic (PLEG): container finished" podID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerID="d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7" exitCode=2 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.811975 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-log" containerID="cri-o://ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.812067 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-api" containerID="cri-o://271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966" gracePeriod=30 Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.811679 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerDied","Data":"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7"} Sep 30 20:09:22 crc kubenswrapper[4603]: I0930 20:09:22.834235 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" podStartSLOduration=3.8341576059999998 podStartE2EDuration="3.834157606s" podCreationTimestamp="2025-09-30 20:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:22.827893101 +0000 UTC m=+1364.766351929" watchObservedRunningTime="2025-09-30 20:09:22.834157606 +0000 UTC m=+1364.772616424" Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.823367 4603 generic.go:334] "Generic (PLEG): container finished" podID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerID="fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9" exitCode=0 Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.823679 4603 generic.go:334] "Generic (PLEG): container finished" podID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" 
containerID="535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6" exitCode=0 Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.823399 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerDied","Data":"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9"} Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.823741 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerDied","Data":"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6"} Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.826887 4603 generic.go:334] "Generic (PLEG): container finished" podID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerID="ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404" exitCode=143 Sep 30 20:09:23 crc kubenswrapper[4603]: I0930 20:09:23.826994 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerDied","Data":"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404"} Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.246925 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-8575cd6744-wt57f" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.247290 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.431291 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.461076 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.564527 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.682693 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle\") pod \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.682789 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjddw\" (UniqueName: \"kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw\") pod \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.682999 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data\") pod \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.683041 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs\") pod \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\" (UID: \"3ca325a7-0ea9-42b1-8401-102f1a20e9a1\") " Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.683894 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs" (OuterVolumeSpecName: "logs") pod "3ca325a7-0ea9-42b1-8401-102f1a20e9a1" (UID: "3ca325a7-0ea9-42b1-8401-102f1a20e9a1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.692348 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw" (OuterVolumeSpecName: "kube-api-access-hjddw") pod "3ca325a7-0ea9-42b1-8401-102f1a20e9a1" (UID: "3ca325a7-0ea9-42b1-8401-102f1a20e9a1"). InnerVolumeSpecName "kube-api-access-hjddw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.724411 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data" (OuterVolumeSpecName: "config-data") pod "3ca325a7-0ea9-42b1-8401-102f1a20e9a1" (UID: "3ca325a7-0ea9-42b1-8401-102f1a20e9a1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.785313 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjddw\" (UniqueName: \"kubernetes.io/projected/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-kube-api-access-hjddw\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.785346 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.785356 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.786599 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ca325a7-0ea9-42b1-8401-102f1a20e9a1" (UID: "3ca325a7-0ea9-42b1-8401-102f1a20e9a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.854039 4603 generic.go:334] "Generic (PLEG): container finished" podID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerID="271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966" exitCode=0 Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.855355 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.855383 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerDied","Data":"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966"} Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.855410 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ca325a7-0ea9-42b1-8401-102f1a20e9a1","Type":"ContainerDied","Data":"ea7f0ee3295e286b2623cd890878e1f340ecfe5e3bd3aac4de1aef82f6b7c9ed"} Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.855426 4603 scope.go:117] "RemoveContainer" containerID="271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.882477 4603 scope.go:117] "RemoveContainer" containerID="ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.889804 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.891794 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.893257 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ca325a7-0ea9-42b1-8401-102f1a20e9a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.916219 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.937074 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 
30 20:09:26 crc kubenswrapper[4603]: E0930 20:09:26.937533 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-log" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.937551 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-log" Sep 30 20:09:26 crc kubenswrapper[4603]: E0930 20:09:26.937578 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-api" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.937585 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-api" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.937758 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-api" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.937794 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" containerName="nova-api-log" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.938729 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.943607 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.943621 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.943901 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.981233 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.996648 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g99gv\" (UniqueName: \"kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.996888 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.996922 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:26 crc kubenswrapper[4603]: I0930 20:09:26.997796 4603 scope.go:117] "RemoveContainer" containerID="271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.000539 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs\") pod 
\"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.000618 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.000755 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: E0930 20:09:27.005309 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966\": container with ID starting with 271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966 not found: ID does not exist" containerID="271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.005359 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966"} err="failed to get container status \"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966\": rpc error: code = NotFound desc = could not find container \"271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966\": container with ID starting with 271a90a05aec3fa5a3a5816510afd804176646346e08a1c3f444b09c49d21966 not found: ID does not exist" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.005385 4603 scope.go:117] "RemoveContainer" containerID="ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404" Sep 30 20:09:27 crc kubenswrapper[4603]: E0930 20:09:27.010686 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404\": container with ID starting with ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404 not found: ID does not exist" containerID="ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.010758 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404"} err="failed to get container status \"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404\": rpc error: code = NotFound desc = could not find container \"ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404\": container with ID starting with ed8ebf58b3f1792e8d26acd6bfecda56c70d473f0d801dc52ff64d6b41c6c404 not found: ID does not exist" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.097999 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rs88k"] Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.102016 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.103795 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.103857 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g99gv\" (UniqueName: \"kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.103917 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.103936 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.103976 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.104004 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.109836 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.109917 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.110158 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.110205 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.111586 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 
30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.114834 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.117903 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.133069 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g99gv\" (UniqueName: \"kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv\") pod \"nova-api-0\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.136783 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rs88k"] Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.207413 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.207453 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.207490 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.207512 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqtv5\" (UniqueName: \"kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.272627 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.309684 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.309739 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.309785 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.309816 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqtv5\" (UniqueName: \"kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.325017 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.336010 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.347044 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.354604 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqtv5\" (UniqueName: \"kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5\") pod \"nova-cell1-cell-mapping-rs88k\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.520554 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.534681 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628029 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628196 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628222 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628272 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628315 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628354 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24kbv\" (UniqueName: \"kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628396 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.628421 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data\") pod \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\" (UID: \"fc47f1d4-1ac9-4670-974f-00b6092fcf95\") " Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.629235 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.629287 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.640176 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts" (OuterVolumeSpecName: "scripts") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.653928 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv" (OuterVolumeSpecName: "kube-api-access-24kbv") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "kube-api-access-24kbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.694568 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.735892 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24kbv\" (UniqueName: \"kubernetes.io/projected/fc47f1d4-1ac9-4670-974f-00b6092fcf95-kube-api-access-24kbv\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.735936 4603 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.735949 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.735961 4603 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc47f1d4-1ac9-4670-974f-00b6092fcf95-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.735987 4603 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.744316 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.781423 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.848865 4603 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.848892 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.862575 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data" (OuterVolumeSpecName: "config-data") pod "fc47f1d4-1ac9-4670-974f-00b6092fcf95" (UID: "fc47f1d4-1ac9-4670-974f-00b6092fcf95"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.952289 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc47f1d4-1ac9-4670-974f-00b6092fcf95-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.996385 4603 generic.go:334] "Generic (PLEG): container finished" podID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerID="23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3" exitCode=0 Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.996469 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerDied","Data":"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3"} Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.996496 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc47f1d4-1ac9-4670-974f-00b6092fcf95","Type":"ContainerDied","Data":"a099caf07032314c23a5cd455d26d521a705b7912a5edac5bf0169027ba1d10d"} Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.996514 4603 scope.go:117] "RemoveContainer" containerID="fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9" Sep 30 20:09:27 crc kubenswrapper[4603]: I0930 20:09:27.996625 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.005676 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.078045 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rs88k"] Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.277715 4603 scope.go:117] "RemoveContainer" containerID="d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.295146 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.306660 4603 scope.go:117] "RemoveContainer" containerID="23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.325473 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.343157 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.343683 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="sg-core" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.343705 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="sg-core" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.346518 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="proxy-httpd" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346537 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="proxy-httpd" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.346558 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-notification-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346564 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-notification-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.346582 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-central-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346589 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-central-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346864 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="proxy-httpd" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346892 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-notification-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346904 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="ceilometer-central-agent" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.346917 4603 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" containerName="sg-core" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.350256 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.355377 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.355560 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.355664 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.374777 4603 scope.go:117] "RemoveContainer" containerID="535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.378294 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.400595 4603 scope.go:117] "RemoveContainer" containerID="fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.402742 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9\": container with ID starting with fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9 not found: ID does not exist" containerID="fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.402785 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9"} err="failed to get container status \"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9\": rpc error: code = NotFound desc = could not find container \"fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9\": container with ID starting with fd9a001d1fda2be58aa5bb987b2ca118c0966da32423a41c273f34dba25e31d9 not found: ID does not exist" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.402810 4603 scope.go:117] "RemoveContainer" containerID="d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.403373 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7\": container with ID starting with d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7 not found: ID does not exist" containerID="d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.403396 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7"} err="failed to get container status \"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7\": rpc error: code = NotFound desc = could not find container \"d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7\": container with ID starting with d1988232cfb5f7f0f2d685449d03fbe17349fe6ebc8db5988accda0dcf1957e7 not found: ID does not exist" Sep 30 
20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.403411 4603 scope.go:117] "RemoveContainer" containerID="23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.403642 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3\": container with ID starting with 23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3 not found: ID does not exist" containerID="23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.403660 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3"} err="failed to get container status \"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3\": rpc error: code = NotFound desc = could not find container \"23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3\": container with ID starting with 23f75570bf58289be04f83c981cfa413850876ea2c71bf08a22926016b0020a3 not found: ID does not exist" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.403674 4603 scope.go:117] "RemoveContainer" containerID="535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6" Sep 30 20:09:28 crc kubenswrapper[4603]: E0930 20:09:28.403981 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6\": container with ID starting with 535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6 not found: ID does not exist" containerID="535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.404002 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6"} err="failed to get container status \"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6\": rpc error: code = NotFound desc = could not find container \"535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6\": container with ID starting with 535b0b8461ba22239833ed03c79951696c1bef1e431dc833dabcf9b1ecfd70d6 not found: ID does not exist" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470105 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470236 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-run-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470275 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-scripts\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " 
pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470300 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-config-data\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470320 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470487 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470541 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-log-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.470584 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn7h9\" (UniqueName: \"kubernetes.io/projected/c7365d55-02e8-49ff-a924-590c17d22105-kube-api-access-nn7h9\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572106 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572190 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-run-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572231 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-scripts\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572292 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-config-data\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572313 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572371 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572415 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-log-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.572451 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn7h9\" (UniqueName: \"kubernetes.io/projected/c7365d55-02e8-49ff-a924-590c17d22105-kube-api-access-nn7h9\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.573764 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-run-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.574123 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7365d55-02e8-49ff-a924-590c17d22105-log-httpd\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.577368 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-config-data\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.577788 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-scripts\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.577980 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.578345 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.583476 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c7365d55-02e8-49ff-a924-590c17d22105-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.596504 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn7h9\" (UniqueName: \"kubernetes.io/projected/c7365d55-02e8-49ff-a924-590c17d22105-kube-api-access-nn7h9\") pod \"ceilometer-0\" (UID: \"c7365d55-02e8-49ff-a924-590c17d22105\") " pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.668145 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.775930 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ca325a7-0ea9-42b1-8401-102f1a20e9a1" path="/var/lib/kubelet/pods/3ca325a7-0ea9-42b1-8401-102f1a20e9a1/volumes" Sep 30 20:09:28 crc kubenswrapper[4603]: I0930 20:09:28.776724 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc47f1d4-1ac9-4670-974f-00b6092fcf95" path="/var/lib/kubelet/pods/fc47f1d4-1ac9-4670-974f-00b6092fcf95/volumes" Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.033516 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rs88k" event={"ID":"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8","Type":"ContainerStarted","Data":"17a827c150a76ea6ef396ff0efb6cd39888bde6a4be127c234fafcbe37d1b3f6"} Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.033923 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rs88k" event={"ID":"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8","Type":"ContainerStarted","Data":"8f6144fb8c4add41efb265b85cb58a06c833a97ab2d659aa9a46d54a9470b1c7"} Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.038951 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerStarted","Data":"e4ea2d236ef8cccc2b3dfc21aedd5fead68fc49c2034a66dc07a4f7dec1cdb5f"} Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.039001 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerStarted","Data":"1f3d9c106163511c1936d6c67cb46b29440449a6cadd99ab4383b8a22d752c58"} Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.039015 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerStarted","Data":"e2e45e103fedb217d7e7f9137af4575b1e365ae7a8bb6bea7a1f8deb31e082d9"} Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.065917 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rs88k" podStartSLOduration=2.065902487 podStartE2EDuration="2.065902487s" podCreationTimestamp="2025-09-30 20:09:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:29.06494274 +0000 UTC m=+1371.003401578" watchObservedRunningTime="2025-09-30 20:09:29.065902487 +0000 UTC m=+1371.004361305" Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.100329 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.100309021 podStartE2EDuration="3.100309021s" 
podCreationTimestamp="2025-09-30 20:09:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:29.088909785 +0000 UTC m=+1371.027368603" watchObservedRunningTime="2025-09-30 20:09:29.100309021 +0000 UTC m=+1371.038767839" Sep 30 20:09:29 crc kubenswrapper[4603]: I0930 20:09:29.169746 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:09:29 crc kubenswrapper[4603]: W0930 20:09:29.179267 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7365d55_02e8_49ff_a924_590c17d22105.slice/crio-9a2f4ccefa65e36535c6b4fc9bb56113ca2f44ea510567631aecf7debc0949cd WatchSource:0}: Error finding container 9a2f4ccefa65e36535c6b4fc9bb56113ca2f44ea510567631aecf7debc0949cd: Status 404 returned error can't find the container with id 9a2f4ccefa65e36535c6b4fc9bb56113ca2f44ea510567631aecf7debc0949cd Sep 30 20:09:30 crc kubenswrapper[4603]: I0930 20:09:30.071122 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7365d55-02e8-49ff-a924-590c17d22105","Type":"ContainerStarted","Data":"9a2f4ccefa65e36535c6b4fc9bb56113ca2f44ea510567631aecf7debc0949cd"} Sep 30 20:09:30 crc kubenswrapper[4603]: I0930 20:09:30.381430 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:09:30 crc kubenswrapper[4603]: I0930 20:09:30.482774 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:09:30 crc kubenswrapper[4603]: I0930 20:09:30.483038 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="dnsmasq-dns" containerID="cri-o://2a5cae4640a91cbeac4e709e92f8ea738a5939cf9336230d90eaf508fafcce52" gracePeriod=10 Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.084546 4603 generic.go:334] "Generic (PLEG): container finished" podID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerID="2a5cae4640a91cbeac4e709e92f8ea738a5939cf9336230d90eaf508fafcce52" exitCode=0 Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.084745 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" event={"ID":"af4c3ff4-bc2f-47f8-8bd2-f074eb888943","Type":"ContainerDied","Data":"2a5cae4640a91cbeac4e709e92f8ea738a5939cf9336230d90eaf508fafcce52"} Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.094931 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7365d55-02e8-49ff-a924-590c17d22105","Type":"ContainerStarted","Data":"05e6de07fb398757a7e42e20b10515f4146d345fbf6df0685c16d36eb1e2767c"} Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.746387 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.847767 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.847857 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.847936 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.847957 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.847993 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.848020 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5982k\" (UniqueName: \"kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k\") pod \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\" (UID: \"af4c3ff4-bc2f-47f8-8bd2-f074eb888943\") " Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.920449 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k" (OuterVolumeSpecName: "kube-api-access-5982k") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "kube-api-access-5982k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.952475 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.953373 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:31 crc kubenswrapper[4603]: I0930 20:09:31.953399 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5982k\" (UniqueName: \"kubernetes.io/projected/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-kube-api-access-5982k\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.014381 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.016824 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.031396 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.045795 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config" (OuterVolumeSpecName: "config") pod "af4c3ff4-bc2f-47f8-8bd2-f074eb888943" (UID: "af4c3ff4-bc2f-47f8-8bd2-f074eb888943"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.055492 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.055533 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.055546 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.055558 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af4c3ff4-bc2f-47f8-8bd2-f074eb888943-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.137265 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7365d55-02e8-49ff-a924-590c17d22105","Type":"ContainerStarted","Data":"b9968762e13644ea27716ccddd1d2fa310602defc2e7baa8aace7dd4b7cd39ef"} Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.139254 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" event={"ID":"af4c3ff4-bc2f-47f8-8bd2-f074eb888943","Type":"ContainerDied","Data":"afcf12aa1d8cada8bb2caec341559b29de412f84aecd7f820782d13228d9d3d1"} Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.139300 4603 scope.go:117] "RemoveContainer" containerID="2a5cae4640a91cbeac4e709e92f8ea738a5939cf9336230d90eaf508fafcce52" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.139693 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-xvn9n" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.171743 4603 scope.go:117] "RemoveContainer" containerID="5abaa07b4ed6714d046bc1e21a13131057564bd7f6518aedda47d8ee9523eb1f" Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.185342 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.201452 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-xvn9n"] Sep 30 20:09:32 crc kubenswrapper[4603]: I0930 20:09:32.782042 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" path="/var/lib/kubelet/pods/af4c3ff4-bc2f-47f8-8bd2-f074eb888943/volumes" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.193686 4603 generic.go:334] "Generic (PLEG): container finished" podID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerID="8d002b92e3d74d3d7e892e05a13ecb49ba4438c9d0c3d1ad87f0336586cd4f40" exitCode=137 Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.193950 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerDied","Data":"8d002b92e3d74d3d7e892e05a13ecb49ba4438c9d0c3d1ad87f0336586cd4f40"} Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.501344 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.598835 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.598935 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.598967 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.599007 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.599031 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s72jt\" (UniqueName: \"kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.599048 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.599089 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key\") pod \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\" (UID: \"e53c6d5a-9a76-4d2e-b821-68c74620f22b\") " Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.602119 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs" (OuterVolumeSpecName: "logs") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.627505 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.629721 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt" (OuterVolumeSpecName: "kube-api-access-s72jt") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "kube-api-access-s72jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.657460 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data" (OuterVolumeSpecName: "config-data") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.665294 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.668856 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts" (OuterVolumeSpecName: "scripts") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.684445 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "e53c6d5a-9a76-4d2e-b821-68c74620f22b" (UID: "e53c6d5a-9a76-4d2e-b821-68c74620f22b"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701257 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701290 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s72jt\" (UniqueName: \"kubernetes.io/projected/e53c6d5a-9a76-4d2e-b821-68c74620f22b-kube-api-access-s72jt\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701302 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e53c6d5a-9a76-4d2e-b821-68c74620f22b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701313 4603 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701321 4603 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701332 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53c6d5a-9a76-4d2e-b821-68c74620f22b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:33 crc kubenswrapper[4603]: I0930 20:09:33.701340 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e53c6d5a-9a76-4d2e-b821-68c74620f22b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.203880 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8575cd6744-wt57f" event={"ID":"e53c6d5a-9a76-4d2e-b821-68c74620f22b","Type":"ContainerDied","Data":"c5e5db1a377e4b952c66a2cb0b717fb855832f51a8d3736e9e51bab750b790f7"} Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.204835 4603 scope.go:117] "RemoveContainer" containerID="9e154543257947fee23e9ce76eec503f256afbb70115d93c4c6b2cc1ce92634e" Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.203898 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-8575cd6744-wt57f" Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.206223 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7365d55-02e8-49ff-a924-590c17d22105","Type":"ContainerStarted","Data":"d8c65dd0a231b8ca26c8c71a5f573d547781712d5c3ce4a6b8180ee986adbcaf"} Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.236398 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.247794 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-8575cd6744-wt57f"] Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.390465 4603 scope.go:117] "RemoveContainer" containerID="8d002b92e3d74d3d7e892e05a13ecb49ba4438c9d0c3d1ad87f0336586cd4f40" Sep 30 20:09:34 crc kubenswrapper[4603]: I0930 20:09:34.777564 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" path="/var/lib/kubelet/pods/e53c6d5a-9a76-4d2e-b821-68c74620f22b/volumes" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.028617 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029114 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon-log" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029128 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon-log" Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029148 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029155 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029261 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="dnsmasq-dns" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029271 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="dnsmasq-dns" Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029292 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="init" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029299 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="init" Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029347 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029355 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029565 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029585 4603 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029601 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon-log" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029617 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029646 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="af4c3ff4-bc2f-47f8-8bd2-f074eb888943" containerName="dnsmasq-dns" Sep 30 20:09:35 crc kubenswrapper[4603]: E0930 20:09:35.029872 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.029880 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53c6d5a-9a76-4d2e-b821-68c74620f22b" containerName="horizon" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.033622 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.041263 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.131867 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.132259 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.132442 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87l5x\" (UniqueName: \"kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.217281 4603 generic.go:334] "Generic (PLEG): container finished" podID="177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" containerID="17a827c150a76ea6ef396ff0efb6cd39888bde6a4be127c234fafcbe37d1b3f6" exitCode=0 Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.217333 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rs88k" event={"ID":"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8","Type":"ContainerDied","Data":"17a827c150a76ea6ef396ff0efb6cd39888bde6a4be127c234fafcbe37d1b3f6"} Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.234265 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " 
pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.234327 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.234378 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87l5x\" (UniqueName: \"kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.234745 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.234853 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.264009 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87l5x\" (UniqueName: \"kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x\") pod \"redhat-operators-nlw6j\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.363620 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:35 crc kubenswrapper[4603]: W0930 20:09:35.898879 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45108d4e_14c1_4d24_99d5_fc395f5e14fd.slice/crio-1a8b458c298001725d71762b8215b4231492081b7a76303daefaef261e7f217f WatchSource:0}: Error finding container 1a8b458c298001725d71762b8215b4231492081b7a76303daefaef261e7f217f: Status 404 returned error can't find the container with id 1a8b458c298001725d71762b8215b4231492081b7a76303daefaef261e7f217f Sep 30 20:09:35 crc kubenswrapper[4603]: I0930 20:09:35.900309 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.227484 4603 generic.go:334] "Generic (PLEG): container finished" podID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerID="1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117" exitCode=0 Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.227577 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerDied","Data":"1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117"} Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.228691 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerStarted","Data":"1a8b458c298001725d71762b8215b4231492081b7a76303daefaef261e7f217f"} Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.234230 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c7365d55-02e8-49ff-a924-590c17d22105","Type":"ContainerStarted","Data":"54f1aa69ce4bc0d7e14b2d8028aa1ed29f2816d58d9a9fa2328e67ba96b70438"} Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.234298 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.293817 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.181928063 podStartE2EDuration="8.29379601s" podCreationTimestamp="2025-09-30 20:09:28 +0000 UTC" firstStartedPulling="2025-09-30 20:09:29.183413276 +0000 UTC m=+1371.121872094" lastFinishedPulling="2025-09-30 20:09:35.295281223 +0000 UTC m=+1377.233740041" observedRunningTime="2025-09-30 20:09:36.285070558 +0000 UTC m=+1378.223529406" watchObservedRunningTime="2025-09-30 20:09:36.29379601 +0000 UTC m=+1378.232254828" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.649863 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.768144 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts\") pod \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.768319 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle\") pod \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.768347 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data\") pod \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.768466 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqtv5\" (UniqueName: \"kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5\") pod \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\" (UID: \"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8\") " Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.773280 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts" (OuterVolumeSpecName: "scripts") pod "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" (UID: "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.788046 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5" (OuterVolumeSpecName: "kube-api-access-vqtv5") pod "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" (UID: "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8"). InnerVolumeSpecName "kube-api-access-vqtv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.794899 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" (UID: "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.801518 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data" (OuterVolumeSpecName: "config-data") pod "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" (UID: "177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.871092 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqtv5\" (UniqueName: \"kubernetes.io/projected/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-kube-api-access-vqtv5\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.874242 4603 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.874266 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:36 crc kubenswrapper[4603]: I0930 20:09:36.874276 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.245058 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rs88k" Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.247295 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rs88k" event={"ID":"177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8","Type":"ContainerDied","Data":"8f6144fb8c4add41efb265b85cb58a06c833a97ab2d659aa9a46d54a9470b1c7"} Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.247321 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f6144fb8c4add41efb265b85cb58a06c833a97ab2d659aa9a46d54a9470b1c7" Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.273421 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.273476 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.475247 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.500430 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.500623 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerName="nova-scheduler-scheduler" containerID="cri-o://addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" gracePeriod=30 Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.546352 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.546579 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" containerID="cri-o://730758919b3fd9506e0ce7dfbca68a3f0ad27785a36ad6a11b51985aeea59f19" gracePeriod=30 Sep 30 20:09:37 crc kubenswrapper[4603]: I0930 20:09:37.546886 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" 
containerName="nova-metadata-metadata" containerID="cri-o://2c347a80d788af89639deb110ea77950f792327066536a2fe1518aed5767ccd8" gracePeriod=30 Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.255240 4603 generic.go:334] "Generic (PLEG): container finished" podID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerID="730758919b3fd9506e0ce7dfbca68a3f0ad27785a36ad6a11b51985aeea59f19" exitCode=143 Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.255280 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerDied","Data":"730758919b3fd9506e0ce7dfbca68a3f0ad27785a36ad6a11b51985aeea59f19"} Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.258332 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-log" containerID="cri-o://1f3d9c106163511c1936d6c67cb46b29440449a6cadd99ab4383b8a22d752c58" gracePeriod=30 Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.258533 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerStarted","Data":"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f"} Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.258591 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-api" containerID="cri-o://e4ea2d236ef8cccc2b3dfc21aedd5fead68fc49c2034a66dc07a4f7dec1cdb5f" gracePeriod=30 Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.264694 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Sep 30 20:09:38 crc kubenswrapper[4603]: I0930 20:09:38.264694 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Sep 30 20:09:39 crc kubenswrapper[4603]: I0930 20:09:39.278204 4603 generic.go:334] "Generic (PLEG): container finished" podID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerID="1f3d9c106163511c1936d6c67cb46b29440449a6cadd99ab4383b8a22d752c58" exitCode=143 Sep 30 20:09:39 crc kubenswrapper[4603]: I0930 20:09:39.279141 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerDied","Data":"1f3d9c106163511c1936d6c67cb46b29440449a6cadd99ab4383b8a22d752c58"} Sep 30 20:09:40 crc kubenswrapper[4603]: E0930 20:09:40.347926 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:09:40 crc kubenswrapper[4603]: E0930 20:09:40.353669 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:09:40 crc kubenswrapper[4603]: E0930 20:09:40.358946 4603 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:09:40 crc kubenswrapper[4603]: E0930 20:09:40.359034 4603 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerName="nova-scheduler-scheduler" Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.150189 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": dial tcp 10.217.0.194:8775: connect: connection refused" Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.150150 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": dial tcp 10.217.0.194:8775: connect: connection refused" Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.299826 4603 generic.go:334] "Generic (PLEG): container finished" podID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerID="addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" exitCode=0 Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.299906 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7e924a-9658-47f8-8c6c-ca0a62758e97","Type":"ContainerDied","Data":"addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141"} Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.301455 4603 generic.go:334] "Generic (PLEG): container finished" podID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerID="2c347a80d788af89639deb110ea77950f792327066536a2fe1518aed5767ccd8" exitCode=0 Sep 30 20:09:41 crc kubenswrapper[4603]: I0930 20:09:41.301481 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerDied","Data":"2c347a80d788af89639deb110ea77950f792327066536a2fe1518aed5767ccd8"} Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.431563 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.570939 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.602381 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle\") pod \"4cd9459d-c432-4eeb-9822-6c38efda56e6\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.602460 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n79jn\" (UniqueName: \"kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn\") pod \"4cd9459d-c432-4eeb-9822-6c38efda56e6\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.602548 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data\") pod \"4cd9459d-c432-4eeb-9822-6c38efda56e6\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.602601 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs\") pod \"4cd9459d-c432-4eeb-9822-6c38efda56e6\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.602661 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs\") pod \"4cd9459d-c432-4eeb-9822-6c38efda56e6\" (UID: \"4cd9459d-c432-4eeb-9822-6c38efda56e6\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.603585 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs" (OuterVolumeSpecName: "logs") pod "4cd9459d-c432-4eeb-9822-6c38efda56e6" (UID: "4cd9459d-c432-4eeb-9822-6c38efda56e6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.644735 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn" (OuterVolumeSpecName: "kube-api-access-n79jn") pod "4cd9459d-c432-4eeb-9822-6c38efda56e6" (UID: "4cd9459d-c432-4eeb-9822-6c38efda56e6"). InnerVolumeSpecName "kube-api-access-n79jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.674691 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data" (OuterVolumeSpecName: "config-data") pod "4cd9459d-c432-4eeb-9822-6c38efda56e6" (UID: "4cd9459d-c432-4eeb-9822-6c38efda56e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.682927 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cd9459d-c432-4eeb-9822-6c38efda56e6" (UID: "4cd9459d-c432-4eeb-9822-6c38efda56e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704311 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data\") pod \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704437 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle\") pod \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704550 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qftbw\" (UniqueName: \"kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw\") pod \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\" (UID: \"4b7e924a-9658-47f8-8c6c-ca0a62758e97\") " Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704930 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704945 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4cd9459d-c432-4eeb-9822-6c38efda56e6-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704953 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.704964 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n79jn\" (UniqueName: \"kubernetes.io/projected/4cd9459d-c432-4eeb-9822-6c38efda56e6-kube-api-access-n79jn\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.706356 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4cd9459d-c432-4eeb-9822-6c38efda56e6" (UID: "4cd9459d-c432-4eeb-9822-6c38efda56e6"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.708436 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw" (OuterVolumeSpecName: "kube-api-access-qftbw") pod "4b7e924a-9658-47f8-8c6c-ca0a62758e97" (UID: "4b7e924a-9658-47f8-8c6c-ca0a62758e97"). InnerVolumeSpecName "kube-api-access-qftbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.730342 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data" (OuterVolumeSpecName: "config-data") pod "4b7e924a-9658-47f8-8c6c-ca0a62758e97" (UID: "4b7e924a-9658-47f8-8c6c-ca0a62758e97"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.730568 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b7e924a-9658-47f8-8c6c-ca0a62758e97" (UID: "4b7e924a-9658-47f8-8c6c-ca0a62758e97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.806401 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.806632 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b7e924a-9658-47f8-8c6c-ca0a62758e97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.806727 4603 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cd9459d-c432-4eeb-9822-6c38efda56e6-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:42 crc kubenswrapper[4603]: I0930 20:09:42.806781 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qftbw\" (UniqueName: \"kubernetes.io/projected/4b7e924a-9658-47f8-8c6c-ca0a62758e97-kube-api-access-qftbw\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.319583 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4b7e924a-9658-47f8-8c6c-ca0a62758e97","Type":"ContainerDied","Data":"27b72fe42651dd7af16fdb44454ff8308bd95235af45d5899f5331aea8e4ef7d"} Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.319629 4603 scope.go:117] "RemoveContainer" containerID="addbd530dffcdea7625c5057d47a650af748103371e4fae9d46e9a00fe882141" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.319728 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.322706 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4cd9459d-c432-4eeb-9822-6c38efda56e6","Type":"ContainerDied","Data":"86f369f2b9f7e3bb26ec3a926470180d7513ded8b1a2b5b07435ca83a6d8be82"} Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.322788 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.408611 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.410141 4603 scope.go:117] "RemoveContainer" containerID="2c347a80d788af89639deb110ea77950f792327066536a2fe1518aed5767ccd8" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.426428 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.438930 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.460499 4603 scope.go:117] "RemoveContainer" containerID="730758919b3fd9506e0ce7dfbca68a3f0ad27785a36ad6a11b51985aeea59f19" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.511489 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523129 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: E0930 20:09:43.523686 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" containerName="nova-manage" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523706 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" containerName="nova-manage" Sep 30 20:09:43 crc kubenswrapper[4603]: E0930 20:09:43.523732 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerName="nova-scheduler-scheduler" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523739 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerName="nova-scheduler-scheduler" Sep 30 20:09:43 crc kubenswrapper[4603]: E0930 20:09:43.523750 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523757 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" Sep 30 20:09:43 crc kubenswrapper[4603]: E0930 20:09:43.523773 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-metadata" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523781 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-metadata" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523952 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-log" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523976 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" containerName="nova-metadata-metadata" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.523990 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" containerName="nova-scheduler-scheduler" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.524005 4603 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" containerName="nova-manage" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.524676 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.526870 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.546083 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.548554 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.550074 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.550496 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.555137 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.564621 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568343 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqf9z\" (UniqueName: \"kubernetes.io/projected/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-kube-api-access-fqf9z\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568432 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568462 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568485 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-logs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568513 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568728 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fd67\" (UniqueName: 
\"kubernetes.io/projected/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-kube-api-access-4fd67\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568777 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-config-data\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.568825 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-config-data\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670133 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670184 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-logs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670214 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670257 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fd67\" (UniqueName: \"kubernetes.io/projected/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-kube-api-access-4fd67\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670281 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-config-data\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670304 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-config-data\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670357 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqf9z\" (UniqueName: \"kubernetes.io/projected/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-kube-api-access-fqf9z\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.670880 
4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-logs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.671699 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.675781 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.677209 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-config-data\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.679444 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.684497 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.685783 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-config-data\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.690104 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqf9z\" (UniqueName: \"kubernetes.io/projected/8e3c4b82-8309-4c22-af40-2d9c530b0ef7-kube-api-access-fqf9z\") pod \"nova-scheduler-0\" (UID: \"8e3c4b82-8309-4c22-af40-2d9c530b0ef7\") " pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.690801 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fd67\" (UniqueName: \"kubernetes.io/projected/cc2cc2ea-71b3-4874-8bcc-8504cb63b192-kube-api-access-4fd67\") pod \"nova-metadata-0\" (UID: \"cc2cc2ea-71b3-4874-8bcc-8504cb63b192\") " pod="openstack/nova-metadata-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.844742 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:09:43 crc kubenswrapper[4603]: I0930 20:09:43.866979 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:09:44 crc kubenswrapper[4603]: I0930 20:09:44.386172 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:09:44 crc kubenswrapper[4603]: W0930 20:09:44.387853 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e3c4b82_8309_4c22_af40_2d9c530b0ef7.slice/crio-80e603c0cbd03db65274b83e8b9ec23c8c938b6cfbf3f51f9d9da7a34bd22932 WatchSource:0}: Error finding container 80e603c0cbd03db65274b83e8b9ec23c8c938b6cfbf3f51f9d9da7a34bd22932: Status 404 returned error can't find the container with id 80e603c0cbd03db65274b83e8b9ec23c8c938b6cfbf3f51f9d9da7a34bd22932 Sep 30 20:09:44 crc kubenswrapper[4603]: I0930 20:09:44.491628 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:09:44 crc kubenswrapper[4603]: W0930 20:09:44.492582 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc2cc2ea_71b3_4874_8bcc_8504cb63b192.slice/crio-0e3b4be32131379d015b1f2870800bb7152f5c10b56d888a8bfb704ed841f45d WatchSource:0}: Error finding container 0e3b4be32131379d015b1f2870800bb7152f5c10b56d888a8bfb704ed841f45d: Status 404 returned error can't find the container with id 0e3b4be32131379d015b1f2870800bb7152f5c10b56d888a8bfb704ed841f45d Sep 30 20:09:44 crc kubenswrapper[4603]: I0930 20:09:44.775187 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b7e924a-9658-47f8-8c6c-ca0a62758e97" path="/var/lib/kubelet/pods/4b7e924a-9658-47f8-8c6c-ca0a62758e97/volumes" Sep 30 20:09:44 crc kubenswrapper[4603]: I0930 20:09:44.776299 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cd9459d-c432-4eeb-9822-6c38efda56e6" path="/var/lib/kubelet/pods/4cd9459d-c432-4eeb-9822-6c38efda56e6/volumes" Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.344284 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc2cc2ea-71b3-4874-8bcc-8504cb63b192","Type":"ContainerStarted","Data":"d9a9b318b8120440598018a4d058a1eaa104997d0a6092dbbc3fb161ac5a78e3"} Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.344353 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc2cc2ea-71b3-4874-8bcc-8504cb63b192","Type":"ContainerStarted","Data":"e8d4b340e87e8f8b0defd9e3dedf68aef4145de38b6ea9379fd87fd6531716ab"} Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.344380 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc2cc2ea-71b3-4874-8bcc-8504cb63b192","Type":"ContainerStarted","Data":"0e3b4be32131379d015b1f2870800bb7152f5c10b56d888a8bfb704ed841f45d"} Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.347090 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8e3c4b82-8309-4c22-af40-2d9c530b0ef7","Type":"ContainerStarted","Data":"2d6e0102e407fa771881ba0181eddfa28efc0ff66a6e57c4182d2736384040a0"} Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.347145 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8e3c4b82-8309-4c22-af40-2d9c530b0ef7","Type":"ContainerStarted","Data":"80e603c0cbd03db65274b83e8b9ec23c8c938b6cfbf3f51f9d9da7a34bd22932"} Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.385845 4603 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.385822983 podStartE2EDuration="2.385822983s" podCreationTimestamp="2025-09-30 20:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:45.378077378 +0000 UTC m=+1387.316536236" watchObservedRunningTime="2025-09-30 20:09:45.385822983 +0000 UTC m=+1387.324281841" Sep 30 20:09:45 crc kubenswrapper[4603]: I0930 20:09:45.404399 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.404372327 podStartE2EDuration="2.404372327s" podCreationTimestamp="2025-09-30 20:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:45.399432841 +0000 UTC m=+1387.337891689" watchObservedRunningTime="2025-09-30 20:09:45.404372327 +0000 UTC m=+1387.342831165" Sep 30 20:09:46 crc kubenswrapper[4603]: I0930 20:09:46.360474 4603 generic.go:334] "Generic (PLEG): container finished" podID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerID="e4ea2d236ef8cccc2b3dfc21aedd5fead68fc49c2034a66dc07a4f7dec1cdb5f" exitCode=0 Sep 30 20:09:46 crc kubenswrapper[4603]: I0930 20:09:46.360506 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerDied","Data":"e4ea2d236ef8cccc2b3dfc21aedd5fead68fc49c2034a66dc07a4f7dec1cdb5f"} Sep 30 20:09:46 crc kubenswrapper[4603]: I0930 20:09:46.994222 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.139825 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.140012 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.140494 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs" (OuterVolumeSpecName: "logs") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "logs". 
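pod_startup_latency_tracker.go reports two numbers: podStartE2EDuration (observedRunningTime minus podCreationTimestamp) and podStartSLOduration, which additionally excludes image-pull time. For the nova pods the two are equal because nothing was pulled (firstStartedPulling and lastFinishedPulling are the zero time). The redhat-operators-nlw6j entry further down makes the subtraction visible; a quick check of that arithmetic:

    package main

    import "fmt"

    func main() {
        // Values from the redhat-operators-nlw6j startup entry at 20:09:53.
        e2e := 18.474574776       // podStartE2EDuration, seconds
        pullStart := 36.231480872 // firstStartedPulling, seconds past 20:09
        pullEnd := 53.105066966   // lastFinishedPulling, seconds past 20:09
        slo := e2e - (pullEnd - pullStart)
        // Prints 1.600988682 (modulo float rounding), matching the
        // podStartSLOduration=1.600988682 reported in the log.
        fmt.Printf("podStartSLOduration = %.9f\n", slo)
    }
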
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.140042 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.140596 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.141004 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g99gv\" (UniqueName: \"kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.141126 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data\") pod \"225cf59a-cc48-48e6-8d2d-66839d09121d\" (UID: \"225cf59a-cc48-48e6-8d2d-66839d09121d\") " Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.141840 4603 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/225cf59a-cc48-48e6-8d2d-66839d09121d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.158692 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv" (OuterVolumeSpecName: "kube-api-access-g99gv") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "kube-api-access-g99gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.208500 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.211382 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.216744 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data" (OuterVolumeSpecName: "config-data") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.232453 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "225cf59a-cc48-48e6-8d2d-66839d09121d" (UID: "225cf59a-cc48-48e6-8d2d-66839d09121d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.244392 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.244419 4603 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.244441 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.244450 4603 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/225cf59a-cc48-48e6-8d2d-66839d09121d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.244459 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g99gv\" (UniqueName: \"kubernetes.io/projected/225cf59a-cc48-48e6-8d2d-66839d09121d-kube-api-access-g99gv\") on node \"crc\" DevicePath \"\"" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.371210 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"225cf59a-cc48-48e6-8d2d-66839d09121d","Type":"ContainerDied","Data":"e2e45e103fedb217d7e7f9137af4575b1e365ae7a8bb6bea7a1f8deb31e082d9"} Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.371267 4603 scope.go:117] "RemoveContainer" containerID="e4ea2d236ef8cccc2b3dfc21aedd5fead68fc49c2034a66dc07a4f7dec1cdb5f" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.371387 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.399585 4603 scope.go:117] "RemoveContainer" containerID="1f3d9c106163511c1936d6c67cb46b29440449a6cadd99ab4383b8a22d752c58" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.408737 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.430680 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.458665 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:47 crc kubenswrapper[4603]: E0930 20:09:47.459155 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-log" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.459203 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-log" Sep 30 20:09:47 crc kubenswrapper[4603]: E0930 20:09:47.459222 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-api" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.459232 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-api" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.459527 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-log" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.459568 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" containerName="nova-api-api" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.460841 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.464820 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.465038 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.465153 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.474737 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651157 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78jzh\" (UniqueName: \"kubernetes.io/projected/bee4fa0d-d809-44ef-b123-a8ec31dda906-kube-api-access-78jzh\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651426 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651501 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651624 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-public-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651701 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-config-data\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.651726 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bee4fa0d-d809-44ef-b123-a8ec31dda906-logs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753066 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-config-data\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753346 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bee4fa0d-d809-44ef-b123-a8ec31dda906-logs\") pod \"nova-api-0\" (UID: 
\"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753457 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78jzh\" (UniqueName: \"kubernetes.io/projected/bee4fa0d-d809-44ef-b123-a8ec31dda906-kube-api-access-78jzh\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753533 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753653 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753738 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bee4fa0d-d809-44ef-b123-a8ec31dda906-logs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.753851 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-public-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.759141 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-internal-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.759540 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-public-tls-certs\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.760483 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-config-data\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.765759 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bee4fa0d-d809-44ef-b123-a8ec31dda906-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.769677 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78jzh\" (UniqueName: \"kubernetes.io/projected/bee4fa0d-d809-44ef-b123-a8ec31dda906-kube-api-access-78jzh\") pod \"nova-api-0\" (UID: \"bee4fa0d-d809-44ef-b123-a8ec31dda906\") " 
pod="openstack/nova-api-0" Sep 30 20:09:47 crc kubenswrapper[4603]: I0930 20:09:47.787151 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.291936 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:09:48 crc kubenswrapper[4603]: W0930 20:09:48.302859 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbee4fa0d_d809_44ef_b123_a8ec31dda906.slice/crio-a49eaa4d3b174b0c79b5222b27601779d332d71020f9b48b770e1a35037c42d0 WatchSource:0}: Error finding container a49eaa4d3b174b0c79b5222b27601779d332d71020f9b48b770e1a35037c42d0: Status 404 returned error can't find the container with id a49eaa4d3b174b0c79b5222b27601779d332d71020f9b48b770e1a35037c42d0 Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.388114 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bee4fa0d-d809-44ef-b123-a8ec31dda906","Type":"ContainerStarted","Data":"a49eaa4d3b174b0c79b5222b27601779d332d71020f9b48b770e1a35037c42d0"} Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.777299 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="225cf59a-cc48-48e6-8d2d-66839d09121d" path="/var/lib/kubelet/pods/225cf59a-cc48-48e6-8d2d-66839d09121d/volumes" Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.845934 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.867916 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:09:48 crc kubenswrapper[4603]: I0930 20:09:48.867981 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:09:49 crc kubenswrapper[4603]: I0930 20:09:49.405386 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bee4fa0d-d809-44ef-b123-a8ec31dda906","Type":"ContainerStarted","Data":"130121c8afdc32fb70fa8af6181b6565255bf86b06745f1a53a1a99c0e6c37c3"} Sep 30 20:09:49 crc kubenswrapper[4603]: I0930 20:09:49.405434 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bee4fa0d-d809-44ef-b123-a8ec31dda906","Type":"ContainerStarted","Data":"a14bdbdbaf6ee4f93cd2a0073836bdd8a3aac55452fa1eed1e72f2117eca716f"} Sep 30 20:09:49 crc kubenswrapper[4603]: I0930 20:09:49.435967 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.435949329 podStartE2EDuration="2.435949329s" podCreationTimestamp="2025-09-30 20:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:09:49.431934258 +0000 UTC m=+1391.370393146" watchObservedRunningTime="2025-09-30 20:09:49.435949329 +0000 UTC m=+1391.374408147" Sep 30 20:09:52 crc kubenswrapper[4603]: I0930 20:09:52.439274 4603 generic.go:334] "Generic (PLEG): container finished" podID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerID="ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f" exitCode=0 Sep 30 20:09:52 crc kubenswrapper[4603]: I0930 20:09:52.439460 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" 
event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerDied","Data":"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f"} Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.450971 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerStarted","Data":"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70"} Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.474597 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nlw6j" podStartSLOduration=1.600988682 podStartE2EDuration="18.474574776s" podCreationTimestamp="2025-09-30 20:09:35 +0000 UTC" firstStartedPulling="2025-09-30 20:09:36.231480872 +0000 UTC m=+1378.169939690" lastFinishedPulling="2025-09-30 20:09:53.105066966 +0000 UTC m=+1395.043525784" observedRunningTime="2025-09-30 20:09:53.472350984 +0000 UTC m=+1395.410809822" watchObservedRunningTime="2025-09-30 20:09:53.474574776 +0000 UTC m=+1395.413033604" Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.845465 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.867083 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.867312 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:09:53 crc kubenswrapper[4603]: I0930 20:09:53.875051 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 20:09:54 crc kubenswrapper[4603]: I0930 20:09:54.507654 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 20:09:54 crc kubenswrapper[4603]: I0930 20:09:54.879352 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cc2cc2ea-71b3-4874-8bcc-8504cb63b192" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:54 crc kubenswrapper[4603]: I0930 20:09:54.879413 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="cc2cc2ea-71b3-4874-8bcc-8504cb63b192" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:55 crc kubenswrapper[4603]: I0930 20:09:55.365358 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:55 crc kubenswrapper[4603]: I0930 20:09:55.365419 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:09:56 crc kubenswrapper[4603]: I0930 20:09:56.413192 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlw6j" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" probeResult="failure" output=< Sep 30 20:09:56 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:09:56 crc kubenswrapper[4603]: > Sep 30 20:09:57 crc kubenswrapper[4603]: I0930 
20:09:57.788410 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:57 crc kubenswrapper[4603]: I0930 20:09:57.788464 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:09:58 crc kubenswrapper[4603]: I0930 20:09:58.680017 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 20:09:58 crc kubenswrapper[4603]: I0930 20:09:58.809524 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bee4fa0d-d809-44ef-b123-a8ec31dda906" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:09:58 crc kubenswrapper[4603]: I0930 20:09:58.809582 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bee4fa0d-d809-44ef-b123-a8ec31dda906" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:10:03 crc kubenswrapper[4603]: I0930 20:10:03.872819 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 20:10:03 crc kubenswrapper[4603]: I0930 20:10:03.873246 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 20:10:03 crc kubenswrapper[4603]: I0930 20:10:03.880241 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:10:03 crc kubenswrapper[4603]: I0930 20:10:03.882773 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:10:06 crc kubenswrapper[4603]: I0930 20:10:06.417784 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlw6j" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" probeResult="failure" output=< Sep 30 20:10:06 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:10:06 crc kubenswrapper[4603]: > Sep 30 20:10:07 crc kubenswrapper[4603]: I0930 20:10:07.796642 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:10:07 crc kubenswrapper[4603]: I0930 20:10:07.797127 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:10:07 crc kubenswrapper[4603]: I0930 20:10:07.801331 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:10:07 crc kubenswrapper[4603]: I0930 20:10:07.813229 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:10:08 crc kubenswrapper[4603]: I0930 20:10:08.573745 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:10:08 crc kubenswrapper[4603]: I0930 20:10:08.583815 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:10:16 crc kubenswrapper[4603]: I0930 20:10:16.388078 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:16 crc kubenswrapper[4603]: I0930 20:10:16.442721 4603 prober.go:107] "Probe failed" probeType="Startup" 
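The probe sequence in this stretch shows the startup probe gating readiness: each container reports probe="startup" status="unhealthy" until its first success flips it to "started", and only then do readiness probes run and report "ready". The failing Get https://... requests are that startup probe hitting nova-api before it listens. A hedged reconstruction of the shape of such a probe in the Kubernetes Go API; only the scheme, port, and path "/" are visible in the log, and the numeric values below are assumptions:

    package main

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
        startup := &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{
                    Path:   "/",                  // the log shows GETs against "/"
                    Port:   intstr.FromInt(8774), // nova-api; nova-metadata uses 8775
                    Scheme: corev1.URISchemeHTTPS,
                },
            },
            TimeoutSeconds:   1,  // assumed; the log only shows Client.Timeout firing
            PeriodSeconds:    10, // assumed
            FailureThreshold: 30, // assumed
        }
        _ = startup
    }
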
pod="openshift-marketplace/redhat-operators-nlw6j" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" probeResult="failure" output=< Sep 30 20:10:16 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:10:16 crc kubenswrapper[4603]: > Sep 30 20:10:17 crc kubenswrapper[4603]: I0930 20:10:17.994263 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:21 crc kubenswrapper[4603]: I0930 20:10:21.719538 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="rabbitmq" containerID="cri-o://13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd" gracePeriod=604795 Sep 30 20:10:23 crc kubenswrapper[4603]: I0930 20:10:23.242886 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="rabbitmq" containerID="cri-o://2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df" gracePeriod=604795 Sep 30 20:10:25 crc kubenswrapper[4603]: I0930 20:10:25.816149 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 30 20:10:26 crc kubenswrapper[4603]: I0930 20:10:26.022314 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Sep 30 20:10:26 crc kubenswrapper[4603]: I0930 20:10:26.427136 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlw6j" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" probeResult="failure" output=< Sep 30 20:10:26 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:10:26 crc kubenswrapper[4603]: > Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.399074 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.540889 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.540933 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgq9g\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541013 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541059 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541078 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541108 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541244 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541267 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541320 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541342 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: 
\"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.541371 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf\") pod \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\" (UID: \"0a887c33-ea6b-49e0-ade6-cc9df164ff32\") " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.545250 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.546119 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.546387 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.549454 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.551471 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g" (OuterVolumeSpecName: "kube-api-access-mgq9g") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "kube-api-access-mgq9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.557705 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.564396 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.578281 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info" (OuterVolumeSpecName: "pod-info") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.598045 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data" (OuterVolumeSpecName: "config-data") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643757 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643788 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643811 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643821 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643830 4603 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643838 4603 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0a887c33-ea6b-49e0-ade6-cc9df164ff32-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643848 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgq9g\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-kube-api-access-mgq9g\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643856 4603 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0a887c33-ea6b-49e0-ade6-cc9df164ff32-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.643865 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.669906 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf" 
(OuterVolumeSpecName: "server-conf") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.675457 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0a887c33-ea6b-49e0-ade6-cc9df164ff32" (UID: "0a887c33-ea6b-49e0-ade6-cc9df164ff32"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.675771 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.746432 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.746459 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0a887c33-ea6b-49e0-ade6-cc9df164ff32-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.746473 4603 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0a887c33-ea6b-49e0-ade6-cc9df164ff32-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.749698 4603 generic.go:334] "Generic (PLEG): container finished" podID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerID="13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd" exitCode=0 Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.749749 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerDied","Data":"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd"} Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.749772 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.749794 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0a887c33-ea6b-49e0-ade6-cc9df164ff32","Type":"ContainerDied","Data":"1646ae28707b9c6f2bd49fe88002ffaf4c54757bb7e7a03826efc9c71cbe7e13"} Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.749818 4603 scope.go:117] "RemoveContainer" containerID="13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.839806 4603 scope.go:117] "RemoveContainer" containerID="19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.876524 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.886291 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.899923 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:28 crc kubenswrapper[4603]: E0930 20:10:28.900490 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="setup-container" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.900512 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="setup-container" Sep 30 20:10:28 crc kubenswrapper[4603]: E0930 20:10:28.900529 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="rabbitmq" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.900536 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="rabbitmq" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.900771 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" containerName="rabbitmq" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.906116 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.906750 4603 scope.go:117] "RemoveContainer" containerID="13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd" Sep 30 20:10:28 crc kubenswrapper[4603]: E0930 20:10:28.907727 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd\": container with ID starting with 13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd not found: ID does not exist" containerID="13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.907849 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd"} err="failed to get container status \"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd\": rpc error: code = NotFound desc = could not find container \"13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd\": container with ID starting with 13c70880a0c6518e3797c87f8b32e79151c104f694a9b82685c1553aea7f07dd not found: ID does not exist" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.907931 4603 scope.go:117] "RemoveContainer" containerID="19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3" Sep 30 20:10:28 crc kubenswrapper[4603]: E0930 20:10:28.908299 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3\": container with ID starting with 19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3 not found: ID does not exist" containerID="19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.908383 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3"} err="failed to get container status \"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3\": rpc error: code = NotFound desc = could not find container \"19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3\": container with ID starting with 19e1f9bc1804091dbf03f9c812acb93330dda11d2e509ee80b19c2f5fa59e8c3 not found: ID does not exist" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.909326 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.909536 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.910940 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.911156 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.911353 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.911870 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 20:10:28 crc 
kubenswrapper[4603]: I0930 20:10:28.916536 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-m724n" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.922663 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961370 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961422 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djgtk\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-kube-api-access-djgtk\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961444 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c52e492d-a859-4989-b9f6-91d03979296b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961545 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961584 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-config-data\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961640 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c52e492d-a859-4989-b9f6-91d03979296b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961672 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961702 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961765 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961793 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:28 crc kubenswrapper[4603]: I0930 20:10:28.961848 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063759 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063835 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063861 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063884 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063911 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063927 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c52e492d-a859-4989-b9f6-91d03979296b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063943 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djgtk\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-kube-api-access-djgtk\") pod \"rabbitmq-server-0\" (UID: 
\"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.063996 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.064021 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-config-data\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.064077 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c52e492d-a859-4989-b9f6-91d03979296b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.064097 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.064833 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.065101 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.065592 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.068462 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.069013 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-config-data\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.069933 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/c52e492d-a859-4989-b9f6-91d03979296b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.073421 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c52e492d-a859-4989-b9f6-91d03979296b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.074778 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.075293 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.077811 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c52e492d-a859-4989-b9f6-91d03979296b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.082485 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djgtk\" (UniqueName: \"kubernetes.io/projected/c52e492d-a859-4989-b9f6-91d03979296b-kube-api-access-djgtk\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.109371 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"c52e492d-a859-4989-b9f6-91d03979296b\") " pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.230947 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.716017 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:10:29 crc kubenswrapper[4603]: W0930 20:10:29.729869 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc52e492d_a859_4989_b9f6_91d03979296b.slice/crio-e2acd58ed288b02d3671c14570c832829e1cfb832a80b0dfe93dadf551008d75 WatchSource:0}: Error finding container e2acd58ed288b02d3671c14570c832829e1cfb832a80b0dfe93dadf551008d75: Status 404 returned error can't find the container with id e2acd58ed288b02d3671c14570c832829e1cfb832a80b0dfe93dadf551008d75 Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.738903 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779143 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779217 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779251 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779295 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779347 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779439 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779459 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779482 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t69mr\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779540 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779573 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: 
\"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.779610 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data\") pod \"362ae718-7c2c-48c2-9710-bc3731aa6de8\" (UID: \"362ae718-7c2c-48c2-9710-bc3731aa6de8\") " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.781531 4603 generic.go:334] "Generic (PLEG): container finished" podID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerID="2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df" exitCode=0 Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.781585 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"362ae718-7c2c-48c2-9710-bc3731aa6de8","Type":"ContainerDied","Data":"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df"} Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.781611 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"362ae718-7c2c-48c2-9710-bc3731aa6de8","Type":"ContainerDied","Data":"775779bd2dfd2e3818a8a38eb6405daedab991dc314086abc2bb96aa66f6e9ca"} Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.781627 4603 scope.go:117] "RemoveContainer" containerID="2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.781706 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.782489 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.799990 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c52e492d-a859-4989-b9f6-91d03979296b","Type":"ContainerStarted","Data":"e2acd58ed288b02d3671c14570c832829e1cfb832a80b0dfe93dadf551008d75"} Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.801561 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.802381 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.802576 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info" (OuterVolumeSpecName: "pod-info") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.809032 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.810452 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.813190 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.830154 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data" (OuterVolumeSpecName: "config-data") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.850400 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr" (OuterVolumeSpecName: "kube-api-access-t69mr") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "kube-api-access-t69mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.859416 4603 scope.go:117] "RemoveContainer" containerID="60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.875568 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf" (OuterVolumeSpecName: "server-conf") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883075 4603 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883102 4603 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/362ae718-7c2c-48c2-9710-bc3731aa6de8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883113 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883122 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883140 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883150 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883173 4603 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/362ae718-7c2c-48c2-9710-bc3731aa6de8-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883183 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883193 4603 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/362ae718-7c2c-48c2-9710-bc3731aa6de8-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.883200 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t69mr\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-kube-api-access-t69mr\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.923668 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.925226 4603 scope.go:117] "RemoveContainer" containerID="2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df" Sep 30 20:10:29 crc kubenswrapper[4603]: E0930 20:10:29.926288 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df\": container with ID starting with 2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df not found: ID does not exist" 
containerID="2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.926320 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df"} err="failed to get container status \"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df\": rpc error: code = NotFound desc = could not find container \"2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df\": container with ID starting with 2475b9502ec0f35ffd0a7b39e804d3d243a9d673e0f5dafe95a152c2892539df not found: ID does not exist" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.926350 4603 scope.go:117] "RemoveContainer" containerID="60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7" Sep 30 20:10:29 crc kubenswrapper[4603]: E0930 20:10:29.927214 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7\": container with ID starting with 60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7 not found: ID does not exist" containerID="60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.927242 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7"} err="failed to get container status \"60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7\": rpc error: code = NotFound desc = could not find container \"60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7\": container with ID starting with 60ecf9a83b8068f795b90790fc4e4ea6feea85a1af12023ec7364cacbba8f2a7 not found: ID does not exist" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.971596 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "362ae718-7c2c-48c2-9710-bc3731aa6de8" (UID: "362ae718-7c2c-48c2-9710-bc3731aa6de8"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.984291 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:29 crc kubenswrapper[4603]: I0930 20:10:29.984323 4603 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/362ae718-7c2c-48c2-9710-bc3731aa6de8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.126215 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.136916 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.156218 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:30 crc kubenswrapper[4603]: E0930 20:10:30.156589 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="rabbitmq" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.156605 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="rabbitmq" Sep 30 20:10:30 crc kubenswrapper[4603]: E0930 20:10:30.156623 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="setup-container" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.156630 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="setup-container" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.156838 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" containerName="rabbitmq" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.157728 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.161344 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.161544 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.161652 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.163422 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.163439 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hxpdc" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.163541 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.164141 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187326 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187381 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187414 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9af798ad-0a37-44c4-960f-d319d2c9f213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187450 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187554 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9af798ad-0a37-44c4-960f-d319d2c9f213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187647 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-confd\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187753 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187859 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.187886 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.188106 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.188208 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck554\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-kube-api-access-ck554\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.220514 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290310 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290364 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290443 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290494 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-ck554\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-kube-api-access-ck554\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290536 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290559 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290592 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9af798ad-0a37-44c4-960f-d319d2c9f213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290627 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290686 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9af798ad-0a37-44c4-960f-d319d2c9f213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290713 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290749 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290801 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.290903 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.291272 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.291360 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.291500 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.291738 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9af798ad-0a37-44c4-960f-d319d2c9f213-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.296014 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9af798ad-0a37-44c4-960f-d319d2c9f213-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.296184 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.296684 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9af798ad-0a37-44c4-960f-d319d2c9f213-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.297698 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.314082 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck554\" (UniqueName: \"kubernetes.io/projected/9af798ad-0a37-44c4-960f-d319d2c9f213-kube-api-access-ck554\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.329698 4603 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9af798ad-0a37-44c4-960f-d319d2c9f213\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.472565 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.775866 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a887c33-ea6b-49e0-ade6-cc9df164ff32" path="/var/lib/kubelet/pods/0a887c33-ea6b-49e0-ade6-cc9df164ff32/volumes" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.804634 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="362ae718-7c2c-48c2-9710-bc3731aa6de8" path="/var/lib/kubelet/pods/362ae718-7c2c-48c2-9710-bc3731aa6de8/volumes" Sep 30 20:10:30 crc kubenswrapper[4603]: I0930 20:10:30.970628 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:10:31 crc kubenswrapper[4603]: I0930 20:10:31.824666 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c52e492d-a859-4989-b9f6-91d03979296b","Type":"ContainerStarted","Data":"563a3037e59cc01136e594fdceb1692e33c007c1135fae82ca89df3ee940834f"} Sep 30 20:10:31 crc kubenswrapper[4603]: I0930 20:10:31.826273 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9af798ad-0a37-44c4-960f-d319d2c9f213","Type":"ContainerStarted","Data":"ae2be6e4afbddef5cfe88d41f20a7d202bd284f772a8446eebea138f699fc8c7"} Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.012532 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.014463 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.017777 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.030890 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117075 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117237 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117392 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117555 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117645 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68fkx\" (UniqueName: \"kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117720 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.117757 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.218792 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68fkx\" (UniqueName: \"kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx\") pod 
\"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.218849 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.218876 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.218928 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.218969 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.219024 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.219095 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.219767 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.219869 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.220822 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " 
pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.220910 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.221002 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.221424 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.334644 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68fkx\" (UniqueName: \"kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx\") pod \"dnsmasq-dns-5576978c7c-7rj6r\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.632427 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:32 crc kubenswrapper[4603]: I0930 20:10:32.879353 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9af798ad-0a37-44c4-960f-d319d2c9f213","Type":"ContainerStarted","Data":"08510c13c2674f2f516810ee5491b1e4823dfd2be100e7b217092d3182860aff"} Sep 30 20:10:33 crc kubenswrapper[4603]: I0930 20:10:33.257495 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:33 crc kubenswrapper[4603]: W0930 20:10:33.268987 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93e1fcce_3dd5_443d_8a24_4c937fb36f69.slice/crio-5a1e5737f4c159bcc1d5dcd36c2664f575cf180ee9c42e0dc8d161d7c89739ca WatchSource:0}: Error finding container 5a1e5737f4c159bcc1d5dcd36c2664f575cf180ee9c42e0dc8d161d7c89739ca: Status 404 returned error can't find the container with id 5a1e5737f4c159bcc1d5dcd36c2664f575cf180ee9c42e0dc8d161d7c89739ca Sep 30 20:10:33 crc kubenswrapper[4603]: I0930 20:10:33.904982 4603 generic.go:334] "Generic (PLEG): container finished" podID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerID="be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d" exitCode=0 Sep 30 20:10:33 crc kubenswrapper[4603]: I0930 20:10:33.905236 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" event={"ID":"93e1fcce-3dd5-443d-8a24-4c937fb36f69","Type":"ContainerDied","Data":"be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d"} Sep 30 20:10:33 crc kubenswrapper[4603]: I0930 20:10:33.905698 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" 
event={"ID":"93e1fcce-3dd5-443d-8a24-4c937fb36f69","Type":"ContainerStarted","Data":"5a1e5737f4c159bcc1d5dcd36c2664f575cf180ee9c42e0dc8d161d7c89739ca"} Sep 30 20:10:34 crc kubenswrapper[4603]: I0930 20:10:34.920344 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" event={"ID":"93e1fcce-3dd5-443d-8a24-4c937fb36f69","Type":"ContainerStarted","Data":"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9"} Sep 30 20:10:34 crc kubenswrapper[4603]: I0930 20:10:34.921322 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:34 crc kubenswrapper[4603]: I0930 20:10:34.948578 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" podStartSLOduration=3.948559237 podStartE2EDuration="3.948559237s" podCreationTimestamp="2025-09-30 20:10:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:10:34.94650728 +0000 UTC m=+1436.884966138" watchObservedRunningTime="2025-09-30 20:10:34.948559237 +0000 UTC m=+1436.887018065" Sep 30 20:10:35 crc kubenswrapper[4603]: I0930 20:10:35.460502 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:10:35 crc kubenswrapper[4603]: I0930 20:10:35.531664 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:10:36 crc kubenswrapper[4603]: I0930 20:10:36.253342 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:10:36 crc kubenswrapper[4603]: I0930 20:10:36.945802 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nlw6j" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" containerID="cri-o://cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70" gracePeriod=2 Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.411057 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.534512 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities\") pod \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.534604 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content\") pod \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.534634 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87l5x\" (UniqueName: \"kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x\") pod \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\" (UID: \"45108d4e-14c1-4d24-99d5-fc395f5e14fd\") " Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.536046 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities" (OuterVolumeSpecName: "utilities") pod "45108d4e-14c1-4d24-99d5-fc395f5e14fd" (UID: "45108d4e-14c1-4d24-99d5-fc395f5e14fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.541386 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x" (OuterVolumeSpecName: "kube-api-access-87l5x") pod "45108d4e-14c1-4d24-99d5-fc395f5e14fd" (UID: "45108d4e-14c1-4d24-99d5-fc395f5e14fd"). InnerVolumeSpecName "kube-api-access-87l5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.608432 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "45108d4e-14c1-4d24-99d5-fc395f5e14fd" (UID: "45108d4e-14c1-4d24-99d5-fc395f5e14fd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.636959 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.636989 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87l5x\" (UniqueName: \"kubernetes.io/projected/45108d4e-14c1-4d24-99d5-fc395f5e14fd-kube-api-access-87l5x\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.637002 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45108d4e-14c1-4d24-99d5-fc395f5e14fd-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.976641 4603 generic.go:334] "Generic (PLEG): container finished" podID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerID="cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70" exitCode=0 Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.976709 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerDied","Data":"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70"} Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.976766 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlw6j" event={"ID":"45108d4e-14c1-4d24-99d5-fc395f5e14fd","Type":"ContainerDied","Data":"1a8b458c298001725d71762b8215b4231492081b7a76303daefaef261e7f217f"} Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.976792 4603 scope.go:117] "RemoveContainer" containerID="cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70" Sep 30 20:10:37 crc kubenswrapper[4603]: I0930 20:10:37.977111 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nlw6j" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.010388 4603 scope.go:117] "RemoveContainer" containerID="ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.040884 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.050060 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nlw6j"] Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.056777 4603 scope.go:117] "RemoveContainer" containerID="1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.092065 4603 scope.go:117] "RemoveContainer" containerID="cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70" Sep 30 20:10:38 crc kubenswrapper[4603]: E0930 20:10:38.092498 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70\": container with ID starting with cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70 not found: ID does not exist" containerID="cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.092536 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70"} err="failed to get container status \"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70\": rpc error: code = NotFound desc = could not find container \"cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70\": container with ID starting with cab65bd4ed176be27a147759b8ac2d6b6c7cb86daf238d961b928605a9d80b70 not found: ID does not exist" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.092560 4603 scope.go:117] "RemoveContainer" containerID="ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f" Sep 30 20:10:38 crc kubenswrapper[4603]: E0930 20:10:38.093268 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f\": container with ID starting with ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f not found: ID does not exist" containerID="ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.093308 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f"} err="failed to get container status \"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f\": rpc error: code = NotFound desc = could not find container \"ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f\": container with ID starting with ad6c0c1e1ecbbb9877514f0cf6104d39ee8840d4dc11af34f3963368d75d7d8f not found: ID does not exist" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.093329 4603 scope.go:117] "RemoveContainer" containerID="1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117" Sep 30 20:10:38 crc kubenswrapper[4603]: E0930 20:10:38.093576 4603 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117\": container with ID starting with 1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117 not found: ID does not exist" containerID="1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.093608 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117"} err="failed to get container status \"1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117\": rpc error: code = NotFound desc = could not find container \"1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117\": container with ID starting with 1cb80a6d6ad55cf3f8dc3af2e996b615679b670ccb933eeb833ddbe2e50e4117 not found: ID does not exist" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.441864 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.441946 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:10:38 crc kubenswrapper[4603]: I0930 20:10:38.783649 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" path="/var/lib/kubelet/pods/45108d4e-14c1-4d24-99d5-fc395f5e14fd/volumes" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.634366 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.741552 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.741919 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="dnsmasq-dns" containerID="cri-o://39f4b5b1f6662f6c2b2dc3bd4b72aa50b95bfc41538e57108788a27fcbaa2124" gracePeriod=10 Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.956663 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-xkkjn"] Sep 30 20:10:42 crc kubenswrapper[4603]: E0930 20:10:42.959804 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="extract-utilities" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.959945 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="extract-utilities" Sep 30 20:10:42 crc kubenswrapper[4603]: E0930 20:10:42.960027 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.960079 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" Sep 30 20:10:42 crc kubenswrapper[4603]: E0930 20:10:42.960148 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="extract-content" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.960237 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="extract-content" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.960480 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="45108d4e-14c1-4d24-99d5-fc395f5e14fd" containerName="registry-server" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.961725 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:42 crc kubenswrapper[4603]: I0930 20:10:42.971073 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-xkkjn"] Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.038605 4603 generic.go:334] "Generic (PLEG): container finished" podID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerID="39f4b5b1f6662f6c2b2dc3bd4b72aa50b95bfc41538e57108788a27fcbaa2124" exitCode=0 Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.038643 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" event={"ID":"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c","Type":"ContainerDied","Data":"39f4b5b1f6662f6c2b2dc3bd4b72aa50b95bfc41538e57108788a27fcbaa2124"} Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.055962 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056039 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-config\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056097 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056117 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056185 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-svc\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: 
\"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056302 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-474q4\" (UniqueName: \"kubernetes.io/projected/c8abae1f-7c59-4d4d-ad61-30628cb8871d-kube-api-access-474q4\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.056324 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161464 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-config\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161548 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161570 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161609 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-svc\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161675 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-474q4\" (UniqueName: \"kubernetes.io/projected/c8abae1f-7c59-4d4d-ad61-30628cb8871d-kube-api-access-474q4\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161697 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.161734 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" 
(UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.162611 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.163101 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-config\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.163590 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.164049 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.164561 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-dns-svc\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.170656 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8abae1f-7c59-4d4d-ad61-30628cb8871d-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.187967 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-474q4\" (UniqueName: \"kubernetes.io/projected/c8abae1f-7c59-4d4d-ad61-30628cb8871d-kube-api-access-474q4\") pod \"dnsmasq-dns-667c9c995c-xkkjn\" (UID: \"c8abae1f-7c59-4d4d-ad61-30628cb8871d\") " pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.288106 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.395610 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.465931 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98qgp\" (UniqueName: \"kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.465997 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.466684 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.466817 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.466926 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.466984 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc\") pod \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\" (UID: \"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c\") " Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.479693 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp" (OuterVolumeSpecName: "kube-api-access-98qgp") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "kube-api-access-98qgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.529364 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config" (OuterVolumeSpecName: "config") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.540734 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.544881 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.554959 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.568981 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.569014 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.569024 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98qgp\" (UniqueName: \"kubernetes.io/projected/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-kube-api-access-98qgp\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.569035 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.569045 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.571637 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" (UID: "d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.670514 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:43 crc kubenswrapper[4603]: I0930 20:10:43.829268 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-xkkjn"] Sep 30 20:10:43 crc kubenswrapper[4603]: W0930 20:10:43.835849 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8abae1f_7c59_4d4d_ad61_30628cb8871d.slice/crio-563ce72348c3831559672b17e531df16485325e2d6219f13803fe2d52b2ed67f WatchSource:0}: Error finding container 563ce72348c3831559672b17e531df16485325e2d6219f13803fe2d52b2ed67f: Status 404 returned error can't find the container with id 563ce72348c3831559672b17e531df16485325e2d6219f13803fe2d52b2ed67f Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.053828 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" event={"ID":"d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c","Type":"ContainerDied","Data":"8fcb234729854fbd247f00069c0a88c699ce44d883fe0bf5386302ba0d358241"} Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.054303 4603 scope.go:117] "RemoveContainer" containerID="39f4b5b1f6662f6c2b2dc3bd4b72aa50b95bfc41538e57108788a27fcbaa2124" Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.054353 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-pdvdd" Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.056016 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" event={"ID":"c8abae1f-7c59-4d4d-ad61-30628cb8871d","Type":"ContainerStarted","Data":"563ce72348c3831559672b17e531df16485325e2d6219f13803fe2d52b2ed67f"} Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.088498 4603 scope.go:117] "RemoveContainer" containerID="91bf0eb5be3de5a9e3d9d7979d6fdc7a1d445dfa4bbe1f08d52961796eeaf6aa" Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.091171 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.098860 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-pdvdd"] Sep 30 20:10:44 crc kubenswrapper[4603]: I0930 20:10:44.779462 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" path="/var/lib/kubelet/pods/d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c/volumes" Sep 30 20:10:45 crc kubenswrapper[4603]: I0930 20:10:45.067324 4603 generic.go:334] "Generic (PLEG): container finished" podID="c8abae1f-7c59-4d4d-ad61-30628cb8871d" containerID="f525a428e4c98ee1da45c0add98f52ad4fe1312477b9a6773edabdf8cd4b5e85" exitCode=0 Sep 30 20:10:45 crc kubenswrapper[4603]: I0930 20:10:45.067364 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" event={"ID":"c8abae1f-7c59-4d4d-ad61-30628cb8871d","Type":"ContainerDied","Data":"f525a428e4c98ee1da45c0add98f52ad4fe1312477b9a6773edabdf8cd4b5e85"} Sep 30 20:10:46 crc kubenswrapper[4603]: I0930 20:10:46.083470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" 
event={"ID":"c8abae1f-7c59-4d4d-ad61-30628cb8871d","Type":"ContainerStarted","Data":"f649943238835f4214c4539420ea605014064c818ec7790ac1895769ab9cad82"} Sep 30 20:10:46 crc kubenswrapper[4603]: I0930 20:10:46.083949 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:46 crc kubenswrapper[4603]: I0930 20:10:46.109940 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" podStartSLOduration=4.109925508 podStartE2EDuration="4.109925508s" podCreationTimestamp="2025-09-30 20:10:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:10:46.106836652 +0000 UTC m=+1448.045295480" watchObservedRunningTime="2025-09-30 20:10:46.109925508 +0000 UTC m=+1448.048384326" Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.290406 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667c9c995c-xkkjn" Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.410908 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.411277 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="dnsmasq-dns" containerID="cri-o://e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9" gracePeriod=10 Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.873443 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.985948 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.985996 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68fkx\" (UniqueName: \"kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.986076 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.986132 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.986164 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: 
\"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.986237 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:53 crc kubenswrapper[4603]: I0930 20:10:53.986289 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.006074 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx" (OuterVolumeSpecName: "kube-api-access-68fkx") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "kube-api-access-68fkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.055508 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.057952 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.067756 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.088225 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.088795 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") pod \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\" (UID: \"93e1fcce-3dd5-443d-8a24-4c937fb36f69\") " Sep 30 20:10:54 crc kubenswrapper[4603]: W0930 20:10:54.088900 4603 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/93e1fcce-3dd5-443d-8a24-4c937fb36f69/volumes/kubernetes.io~configmap/openstack-edpm-ipam Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.088916 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.089333 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.089360 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.089375 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68fkx\" (UniqueName: \"kubernetes.io/projected/93e1fcce-3dd5-443d-8a24-4c937fb36f69-kube-api-access-68fkx\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.089429 4603 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.089443 4603 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.111284 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.114830 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config" (OuterVolumeSpecName: "config") pod "93e1fcce-3dd5-443d-8a24-4c937fb36f69" (UID: "93e1fcce-3dd5-443d-8a24-4c937fb36f69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.158908 4603 generic.go:334] "Generic (PLEG): container finished" podID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerID="e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9" exitCode=0 Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.158959 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.158985 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" event={"ID":"93e1fcce-3dd5-443d-8a24-4c937fb36f69","Type":"ContainerDied","Data":"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9"} Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.159371 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-7rj6r" event={"ID":"93e1fcce-3dd5-443d-8a24-4c937fb36f69","Type":"ContainerDied","Data":"5a1e5737f4c159bcc1d5dcd36c2664f575cf180ee9c42e0dc8d161d7c89739ca"} Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.159401 4603 scope.go:117] "RemoveContainer" containerID="e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.187522 4603 scope.go:117] "RemoveContainer" containerID="be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.190982 4603 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.191100 4603 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e1fcce-3dd5-443d-8a24-4c937fb36f69-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.204255 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.211488 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-7rj6r"] Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.213330 4603 scope.go:117] "RemoveContainer" containerID="e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9" Sep 30 20:10:54 crc kubenswrapper[4603]: E0930 20:10:54.214823 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9\": container with ID starting with e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9 not found: ID does not exist" containerID="e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9" Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.214928 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9"} err="failed to get container status \"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9\": rpc error: code = NotFound desc = could not find container \"e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9\": container with ID starting with e3fad6c4357aab910d401560781a1eb5092545c89e69ac4ecd076666965ec6a9 not found: ID does not exist" 
Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.215036 4603 scope.go:117] "RemoveContainer" containerID="be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d"
Sep 30 20:10:54 crc kubenswrapper[4603]: E0930 20:10:54.215544 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d\": container with ID starting with be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d not found: ID does not exist" containerID="be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d"
Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.215620 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d"} err="failed to get container status \"be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d\": rpc error: code = NotFound desc = could not find container \"be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d\": container with ID starting with be71cd5eeda2fbac88cb7ff0029b956d8702c3275691ac709aa8754ef7b3467d not found: ID does not exist"
Sep 30 20:10:54 crc kubenswrapper[4603]: I0930 20:10:54.776210 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" path="/var/lib/kubelet/pods/93e1fcce-3dd5-443d-8a24-4c937fb36f69/volumes"
Sep 30 20:11:04 crc kubenswrapper[4603]: I0930 20:11:04.264432 4603 generic.go:334] "Generic (PLEG): container finished" podID="c52e492d-a859-4989-b9f6-91d03979296b" containerID="563a3037e59cc01136e594fdceb1692e33c007c1135fae82ca89df3ee940834f" exitCode=0
Sep 30 20:11:04 crc kubenswrapper[4603]: I0930 20:11:04.264591 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c52e492d-a859-4989-b9f6-91d03979296b","Type":"ContainerDied","Data":"563a3037e59cc01136e594fdceb1692e33c007c1135fae82ca89df3ee940834f"}
Sep 30 20:11:05 crc kubenswrapper[4603]: I0930 20:11:05.276286 4603 generic.go:334] "Generic (PLEG): container finished" podID="9af798ad-0a37-44c4-960f-d319d2c9f213" containerID="08510c13c2674f2f516810ee5491b1e4823dfd2be100e7b217092d3182860aff" exitCode=0
Sep 30 20:11:05 crc kubenswrapper[4603]: I0930 20:11:05.276381 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9af798ad-0a37-44c4-960f-d319d2c9f213","Type":"ContainerDied","Data":"08510c13c2674f2f516810ee5491b1e4823dfd2be100e7b217092d3182860aff"}
Sep 30 20:11:05 crc kubenswrapper[4603]: I0930 20:11:05.280212 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c52e492d-a859-4989-b9f6-91d03979296b","Type":"ContainerStarted","Data":"f4820c33f2c51c2483a338958e440775736f7e330138b80578ef186c57504b76"}
Sep 30 20:11:05 crc kubenswrapper[4603]: I0930 20:11:05.280369 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Sep 30 20:11:05 crc kubenswrapper[4603]: I0930 20:11:05.331050 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.33103201 podStartE2EDuration="37.33103201s" podCreationTimestamp="2025-09-30 20:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:11:05.326180895 +0000 UTC m=+1467.264639713" watchObservedRunningTime="2025-09-30 20:11:05.33103201 +0000 UTC m=+1467.269490828"
Sep 30 20:11:06 crc kubenswrapper[4603]: I0930 20:11:06.290337 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9af798ad-0a37-44c4-960f-d319d2c9f213","Type":"ContainerStarted","Data":"f5124de8a02fa36341fbab231122ec750baa78cd0a1ee5ff934a877dba22b597"}
Sep 30 20:11:06 crc kubenswrapper[4603]: I0930 20:11:06.290907 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:11:06 crc kubenswrapper[4603]: I0930 20:11:06.318901 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.318881972 podStartE2EDuration="36.318881972s" podCreationTimestamp="2025-09-30 20:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:11:06.315433066 +0000 UTC m=+1468.253891884" watchObservedRunningTime="2025-09-30 20:11:06.318881972 +0000 UTC m=+1468.257340790"
Sep 30 20:11:08 crc kubenswrapper[4603]: I0930 20:11:08.442319 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:11:08 crc kubenswrapper[4603]: I0930 20:11:08.442978 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.385393 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"]
Sep 30 20:11:12 crc kubenswrapper[4603]: E0930 20:11:12.385982 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.385996 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: E0930 20:11:12.386017 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="init"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386022 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="init"
Sep 30 20:11:12 crc kubenswrapper[4603]: E0930 20:11:12.386030 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="init"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386035 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="init"
Sep 30 20:11:12 crc kubenswrapper[4603]: E0930 20:11:12.386065 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386072 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386267 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d53fd54d-dc39-4d33-9a71-0c0f0f4f7d3c" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386279 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e1fcce-3dd5-443d-8a24-4c937fb36f69" containerName="dnsmasq-dns"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.386839 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.395369 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.395366 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.396839 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.398325 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.424302 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"]
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.553178 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.553327 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.553415 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ckb5\" (UniqueName: \"kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.553466 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.655641 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ckb5\" (UniqueName: \"kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.655696 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.655804 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.655880 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.662549 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.664707 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.667587 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.687328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ckb5\" (UniqueName: \"kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:12 crc kubenswrapper[4603]: I0930 20:11:12.708216 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:13 crc kubenswrapper[4603]: I0930 20:11:13.574947 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"]
Sep 30 20:11:14 crc kubenswrapper[4603]: I0930 20:11:14.359442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h" event={"ID":"b7c30cf9-2ad9-4627-8364-293fec61fef1","Type":"ContainerStarted","Data":"82eaebc25d915e4729aa42687159f82f5313ebe6a0f2f758444e2888abdd734c"}
Sep 30 20:11:19 crc kubenswrapper[4603]: I0930 20:11:19.234376 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Sep 30 20:11:20 crc kubenswrapper[4603]: I0930 20:11:20.476225 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:11:28 crc kubenswrapper[4603]: I0930 20:11:28.499791 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h" event={"ID":"b7c30cf9-2ad9-4627-8364-293fec61fef1","Type":"ContainerStarted","Data":"b8840c0cb0ae1ea578a7c4f03f69128f631f4844291e7118aa2f73004de599b5"}
Sep 30 20:11:28 crc kubenswrapper[4603]: I0930 20:11:28.524323 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h" podStartSLOduration=1.913443718 podStartE2EDuration="16.524301746s" podCreationTimestamp="2025-09-30 20:11:12 +0000 UTC" firstStartedPulling="2025-09-30 20:11:13.57252234 +0000 UTC m=+1475.510981188" lastFinishedPulling="2025-09-30 20:11:28.183380398 +0000 UTC m=+1490.121839216" observedRunningTime="2025-09-30 20:11:28.516660444 +0000 UTC m=+1490.455119262" watchObservedRunningTime="2025-09-30 20:11:28.524301746 +0000 UTC m=+1490.462760564"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.137619 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vgg75"]
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.140576 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgg75"
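The "Observed pod startup duration" entries above report two durations: podStartE2EDuration is wall-clock time from pod creation to the first observed running state, while podStartSLOduration excludes the image-pull window (for the rabbitmq pods, which pulled nothing, the two are equal; for repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h at 20:11:28 they differ by the roughly 14.6s pull). A small Go check of that arithmetic, with the timestamps copied from the repo-setup entry; the relationship between the fields, not this code, is what the log attests:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the log's "2025-09-30 20:11:12 +0000 UTC" style.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30 20:11:12 +0000 UTC")          // podCreationTimestamp
	firstPull := parse("2025-09-30 20:11:13.57252234 +0000 UTC")  // firstStartedPulling
	lastPull := parse("2025-09-30 20:11:28.183380398 +0000 UTC")  // lastFinishedPulling
	running := parse("2025-09-30 20:11:28.524301746 +0000 UTC")   // watchObservedRunningTime

	e2e := running.Sub(created)          // ~16.524s, the logged podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // ~1.913s, the pull window excluded
	fmt.Println(e2e, slo)
}

Computed from the wall-clock values this lands within a few tens of nanoseconds of the logged podStartSLOduration=1.913443718, and it matches exactly when the monotonic m=+ readings in the entry are used instead.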
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.170068 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgg75"]
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.204443 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5m46\" (UniqueName: \"kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.204498 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.204660 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.307225 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.307517 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5m46\" (UniqueName: \"kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.307567 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.307786 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.308156 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.327634 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5m46\" (UniqueName: \"kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46\") pod \"community-operators-vgg75\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.470539 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:35 crc kubenswrapper[4603]: I0930 20:11:35.929155 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgg75"]
Sep 30 20:11:36 crc kubenswrapper[4603]: I0930 20:11:36.599652 4603 generic.go:334] "Generic (PLEG): container finished" podID="5b05d511-cecc-4844-b0df-78124d6823a7" containerID="cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1" exitCode=0
Sep 30 20:11:36 crc kubenswrapper[4603]: I0930 20:11:36.599710 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerDied","Data":"cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1"}
Sep 30 20:11:36 crc kubenswrapper[4603]: I0930 20:11:36.600054 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerStarted","Data":"aef198a8d125962f91010b85f1e5c8bd9b0aa6774e421ae989a8ab7f53866efa"}
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.441763 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.442127 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.442257 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x"
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.443564 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.443669 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" gracePeriod=600
Sep 30 20:11:38 crc kubenswrapper[4603]: E0930 20:11:38.587269 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.617787 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerStarted","Data":"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19"}
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.623474 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" exitCode=0
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.623520 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576"}
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.623554 4603 scope.go:117] "RemoveContainer" containerID="b76a83b08fdc983b6c0da8265767de687c7db0bfd0bf64bb5e99e4465fb6d156"
Sep 30 20:11:38 crc kubenswrapper[4603]: I0930 20:11:38.624400 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576"
Sep 30 20:11:38 crc kubenswrapper[4603]: E0930 20:11:38.624804 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:11:40 crc kubenswrapper[4603]: I0930 20:11:40.657542 4603 generic.go:334] "Generic (PLEG): container finished" podID="b7c30cf9-2ad9-4627-8364-293fec61fef1" containerID="b8840c0cb0ae1ea578a7c4f03f69128f631f4844291e7118aa2f73004de599b5" exitCode=0
Sep 30 20:11:40 crc kubenswrapper[4603]: I0930 20:11:40.657950 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h" event={"ID":"b7c30cf9-2ad9-4627-8364-293fec61fef1","Type":"ContainerDied","Data":"b8840c0cb0ae1ea578a7c4f03f69128f631f4844291e7118aa2f73004de599b5"}
Sep 30 20:11:41 crc kubenswrapper[4603]: I0930 20:11:41.672268 4603 generic.go:334] "Generic (PLEG): container finished" podID="5b05d511-cecc-4844-b0df-78124d6823a7" containerID="285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19" exitCode=0
Sep 30 20:11:41 crc kubenswrapper[4603]: I0930 20:11:41.672720 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerDied","Data":"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19"}
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.089509 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.140952 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory\") pod \"b7c30cf9-2ad9-4627-8364-293fec61fef1\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") "
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.141066 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle\") pod \"b7c30cf9-2ad9-4627-8364-293fec61fef1\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") "
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.141226 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key\") pod \"b7c30cf9-2ad9-4627-8364-293fec61fef1\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") "
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.141335 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ckb5\" (UniqueName: \"kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5\") pod \"b7c30cf9-2ad9-4627-8364-293fec61fef1\" (UID: \"b7c30cf9-2ad9-4627-8364-293fec61fef1\") "
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.146619 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5" (OuterVolumeSpecName: "kube-api-access-2ckb5") pod "b7c30cf9-2ad9-4627-8364-293fec61fef1" (UID: "b7c30cf9-2ad9-4627-8364-293fec61fef1"). InnerVolumeSpecName "kube-api-access-2ckb5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.150374 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b7c30cf9-2ad9-4627-8364-293fec61fef1" (UID: "b7c30cf9-2ad9-4627-8364-293fec61fef1"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.172263 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory" (OuterVolumeSpecName: "inventory") pod "b7c30cf9-2ad9-4627-8364-293fec61fef1" (UID: "b7c30cf9-2ad9-4627-8364-293fec61fef1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.178491 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b7c30cf9-2ad9-4627-8364-293fec61fef1" (UID: "b7c30cf9-2ad9-4627-8364-293fec61fef1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.243192 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.243225 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ckb5\" (UniqueName: \"kubernetes.io/projected/b7c30cf9-2ad9-4627-8364-293fec61fef1-kube-api-access-2ckb5\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.243237 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.243248 4603 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7c30cf9-2ad9-4627-8364-293fec61fef1-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.691586 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerStarted","Data":"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e"}
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.694506 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h" event={"ID":"b7c30cf9-2ad9-4627-8364-293fec61fef1","Type":"ContainerDied","Data":"82eaebc25d915e4729aa42687159f82f5313ebe6a0f2f758444e2888abdd734c"}
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.694563 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82eaebc25d915e4729aa42687159f82f5313ebe6a0f2f758444e2888abdd734c"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.694570 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.724197 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vgg75" podStartSLOduration=2.102728585 podStartE2EDuration="7.724178843s" podCreationTimestamp="2025-09-30 20:11:35 +0000 UTC" firstStartedPulling="2025-09-30 20:11:36.602085722 +0000 UTC m=+1498.540544570" lastFinishedPulling="2025-09-30 20:11:42.22353601 +0000 UTC m=+1504.161994828" observedRunningTime="2025-09-30 20:11:42.723812023 +0000 UTC m=+1504.662270881" watchObservedRunningTime="2025-09-30 20:11:42.724178843 +0000 UTC m=+1504.662637661"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.825716 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"]
Sep 30 20:11:42 crc kubenswrapper[4603]: E0930 20:11:42.826346 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c30cf9-2ad9-4627-8364-293fec61fef1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.826396 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c30cf9-2ad9-4627-8364-293fec61fef1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.826695 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7c30cf9-2ad9-4627-8364-293fec61fef1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.827592 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.830143 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.830512 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.830752 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.832877 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"]
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.835716 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.854647 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.854959 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7qzp\" (UniqueName: \"kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.855230 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.956865 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.957216 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.957338 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7qzp\" (UniqueName: \"kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.961457 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.961615 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:42 crc kubenswrapper[4603]: I0930 20:11:42.974286 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7qzp\" (UniqueName: \"kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-2xbs9\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:43 crc kubenswrapper[4603]: I0930 20:11:43.152576 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:43 crc kubenswrapper[4603]: I0930 20:11:43.676845 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"]
Sep 30 20:11:43 crc kubenswrapper[4603]: W0930 20:11:43.689268 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b2fc65f_3fea_42f7_903c_22d5ca817ad8.slice/crio-3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0 WatchSource:0}: Error finding container 3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0: Status 404 returned error can't find the container with id 3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0
Sep 30 20:11:43 crc kubenswrapper[4603]: I0930 20:11:43.704321 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9" event={"ID":"0b2fc65f-3fea-42f7-903c-22d5ca817ad8","Type":"ContainerStarted","Data":"3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0"}
Sep 30 20:11:44 crc kubenswrapper[4603]: I0930 20:11:44.232255 4603 scope.go:117] "RemoveContainer" containerID="8eb42c67af21f2f98dd885ec9c009633d97762be66d4eafea9795211261763ac"
Sep 30 20:11:44 crc kubenswrapper[4603]: I0930 20:11:44.259454 4603 scope.go:117] "RemoveContainer" containerID="e52ed40163cd1aff1eb59426b1bc695c43b7b8c073ce298029696eb02cbac8d7"
Sep 30 20:11:44 crc kubenswrapper[4603]: I0930 20:11:44.712923 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9" event={"ID":"0b2fc65f-3fea-42f7-903c-22d5ca817ad8","Type":"ContainerStarted","Data":"1a3cc660d6cefbd264ea4f27772059e6320f5a14e3ad42efc110e8b68a31636f"}
Sep 30 20:11:44 crc kubenswrapper[4603]: I0930 20:11:44.743516 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9" podStartSLOduration=2.5617672750000002 podStartE2EDuration="2.743494371s" podCreationTimestamp="2025-09-30 20:11:42 +0000 UTC" firstStartedPulling="2025-09-30 20:11:43.69190277 +0000 UTC m=+1505.630361588" lastFinishedPulling="2025-09-30 20:11:43.873629866 +0000 UTC m=+1505.812088684" observedRunningTime="2025-09-30 20:11:44.738691658 +0000 UTC m=+1506.677150496" watchObservedRunningTime="2025-09-30 20:11:44.743494371 +0000 UTC m=+1506.681953199"
Sep 30 20:11:45 crc kubenswrapper[4603]: I0930 20:11:45.470687 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:45 crc kubenswrapper[4603]: I0930 20:11:45.470725 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:45 crc kubenswrapper[4603]: I0930 20:11:45.523662 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vgg75"
Sep 30 20:11:46 crc kubenswrapper[4603]: I0930 20:11:46.737364 4603 generic.go:334] "Generic (PLEG): container finished" podID="0b2fc65f-3fea-42f7-903c-22d5ca817ad8" containerID="1a3cc660d6cefbd264ea4f27772059e6320f5a14e3ad42efc110e8b68a31636f" exitCode=0
Sep 30 20:11:46 crc kubenswrapper[4603]: I0930 20:11:46.737475 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9" event={"ID":"0b2fc65f-3fea-42f7-903c-22d5ca817ad8","Type":"ContainerDied","Data":"1a3cc660d6cefbd264ea4f27772059e6320f5a14e3ad42efc110e8b68a31636f"}
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.196363 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.261489 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory\") pod \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") "
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.261548 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key\") pod \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") "
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.261623 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7qzp\" (UniqueName: \"kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp\") pod \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\" (UID: \"0b2fc65f-3fea-42f7-903c-22d5ca817ad8\") "
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.268968 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp" (OuterVolumeSpecName: "kube-api-access-j7qzp") pod "0b2fc65f-3fea-42f7-903c-22d5ca817ad8" (UID: "0b2fc65f-3fea-42f7-903c-22d5ca817ad8"). InnerVolumeSpecName "kube-api-access-j7qzp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.289773 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory" (OuterVolumeSpecName: "inventory") pod "0b2fc65f-3fea-42f7-903c-22d5ca817ad8" (UID: "0b2fc65f-3fea-42f7-903c-22d5ca817ad8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.290269 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0b2fc65f-3fea-42f7-903c-22d5ca817ad8" (UID: "0b2fc65f-3fea-42f7-903c-22d5ca817ad8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.364137 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.364178 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.364189 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7qzp\" (UniqueName: \"kubernetes.io/projected/0b2fc65f-3fea-42f7-903c-22d5ca817ad8-kube-api-access-j7qzp\") on node \"crc\" DevicePath \"\""
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.770629 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.782974 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-2xbs9" event={"ID":"0b2fc65f-3fea-42f7-903c-22d5ca817ad8","Type":"ContainerDied","Data":"3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0"}
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.783290 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ec2eb938dc457efda122d63471580f751a3b215b01f6b3828dbfe18221719c0"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.849363 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"]
Sep 30 20:11:48 crc kubenswrapper[4603]: E0930 20:11:48.855781 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b2fc65f-3fea-42f7-903c-22d5ca817ad8" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.855820 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b2fc65f-3fea-42f7-903c-22d5ca817ad8" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.856109 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b2fc65f-3fea-42f7-903c-22d5ca817ad8" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.856910 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.861152 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.861399 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.861929 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.861976 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.878793 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m9nt\" (UniqueName: \"kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.878866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.878979 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.879106 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.885997 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"]
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.987199 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.987892 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.988140 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m9nt\" (UniqueName: \"kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.988451 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:48 crc kubenswrapper[4603]: I0930 20:11:48.992590 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.005253 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.009326 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.014424 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m9nt\" (UniqueName: \"kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.179844 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.719759 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd"]
Sep 30 20:11:49 crc kubenswrapper[4603]: I0930 20:11:49.765495 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576"
Sep 30 20:11:49 crc kubenswrapper[4603]: E0930 20:11:49.765713 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:11:50 crc kubenswrapper[4603]: I0930 20:11:50.787792 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" event={"ID":"d826ad98-bfbb-4355-b0a2-c7ea9715b990","Type":"ContainerStarted","Data":"68c2c87a36caa9b0f6c75b5c0f0e7d7a0b8614ac29674361de6514a4d4b9e2fd"}
Sep 30 20:11:50 crc kubenswrapper[4603]: I0930 20:11:50.788109 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" event={"ID":"d826ad98-bfbb-4355-b0a2-c7ea9715b990","Type":"ContainerStarted","Data":"62144a64357d04b93f60e7996e13dd3fa8980f84f3e9e982311f85e0776e6cc5"}
Sep 30 20:11:50 crc kubenswrapper[4603]: I0930 20:11:50.810773 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" podStartSLOduration=2.622487316 podStartE2EDuration="2.810748873s" podCreationTimestamp="2025-09-30 20:11:48 +0000 UTC" firstStartedPulling="2025-09-30 20:11:49.773806978 +0000 UTC m=+1511.712265796" lastFinishedPulling="2025-09-30 20:11:49.962068525 +0000 UTC m=+1511.900527353" observedRunningTime="2025-09-30 20:11:50.802804242 +0000 UTC m=+1512.741263070" watchObservedRunningTime="2025-09-30 20:11:50.810748873 +0000 UTC m=+1512.749207711"
Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.386944 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6qz88"]
Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.393108 4603 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.406488 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6qz88"] Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.432267 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.432341 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.432555 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvpsq\" (UniqueName: \"kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.527724 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vgg75" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.535135 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.535467 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.535605 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvpsq\" (UniqueName: \"kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.537961 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.538037 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities\") pod \"certified-operators-6qz88\" (UID: 
\"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.562796 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvpsq\" (UniqueName: \"kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq\") pod \"certified-operators-6qz88\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:55 crc kubenswrapper[4603]: I0930 20:11:55.734940 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:11:56 crc kubenswrapper[4603]: I0930 20:11:56.214628 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6qz88"] Sep 30 20:11:56 crc kubenswrapper[4603]: I0930 20:11:56.872530 4603 generic.go:334] "Generic (PLEG): container finished" podID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerID="6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3" exitCode=0 Sep 30 20:11:56 crc kubenswrapper[4603]: I0930 20:11:56.872652 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerDied","Data":"6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3"} Sep 30 20:11:56 crc kubenswrapper[4603]: I0930 20:11:56.872814 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerStarted","Data":"a2c4626d6d4238a7d05fee2c10ebd4e281ef12277f73a051e3a6d858dbed6efe"} Sep 30 20:11:57 crc kubenswrapper[4603]: I0930 20:11:57.941663 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgg75"] Sep 30 20:11:57 crc kubenswrapper[4603]: I0930 20:11:57.942504 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vgg75" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="registry-server" containerID="cri-o://17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e" gracePeriod=2 Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.394551 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vgg75" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.593942 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content\") pod \"5b05d511-cecc-4844-b0df-78124d6823a7\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.594000 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities\") pod \"5b05d511-cecc-4844-b0df-78124d6823a7\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.594078 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5m46\" (UniqueName: \"kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46\") pod \"5b05d511-cecc-4844-b0df-78124d6823a7\" (UID: \"5b05d511-cecc-4844-b0df-78124d6823a7\") " Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.595305 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities" (OuterVolumeSpecName: "utilities") pod "5b05d511-cecc-4844-b0df-78124d6823a7" (UID: "5b05d511-cecc-4844-b0df-78124d6823a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.599368 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46" (OuterVolumeSpecName: "kube-api-access-f5m46") pod "5b05d511-cecc-4844-b0df-78124d6823a7" (UID: "5b05d511-cecc-4844-b0df-78124d6823a7"). InnerVolumeSpecName "kube-api-access-f5m46". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.647862 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b05d511-cecc-4844-b0df-78124d6823a7" (UID: "5b05d511-cecc-4844-b0df-78124d6823a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.695920 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.695957 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b05d511-cecc-4844-b0df-78124d6823a7-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.695970 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5m46\" (UniqueName: \"kubernetes.io/projected/5b05d511-cecc-4844-b0df-78124d6823a7-kube-api-access-f5m46\") on node \"crc\" DevicePath \"\"" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.898699 4603 generic.go:334] "Generic (PLEG): container finished" podID="5b05d511-cecc-4844-b0df-78124d6823a7" containerID="17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e" exitCode=0 Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.898783 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerDied","Data":"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e"} Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.898813 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgg75" event={"ID":"5b05d511-cecc-4844-b0df-78124d6823a7","Type":"ContainerDied","Data":"aef198a8d125962f91010b85f1e5c8bd9b0aa6774e421ae989a8ab7f53866efa"} Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.898834 4603 scope.go:117] "RemoveContainer" containerID="17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.899275 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vgg75" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.903715 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerStarted","Data":"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc"} Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.938263 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgg75"] Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.950314 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vgg75"] Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.964867 4603 scope.go:117] "RemoveContainer" containerID="285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19" Sep 30 20:11:58 crc kubenswrapper[4603]: I0930 20:11:58.990871 4603 scope.go:117] "RemoveContainer" containerID="cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.031817 4603 scope.go:117] "RemoveContainer" containerID="17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e" Sep 30 20:11:59 crc kubenswrapper[4603]: E0930 20:11:59.032307 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e\": container with ID starting with 17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e not found: ID does not exist" containerID="17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.032361 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e"} err="failed to get container status \"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e\": rpc error: code = NotFound desc = could not find container \"17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e\": container with ID starting with 17fd68d8ab962e86337e8fbf60592526cbfbf3bc5609c928d722c245ac9cec9e not found: ID does not exist" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.032395 4603 scope.go:117] "RemoveContainer" containerID="285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19" Sep 30 20:11:59 crc kubenswrapper[4603]: E0930 20:11:59.032809 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19\": container with ID starting with 285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19 not found: ID does not exist" containerID="285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.032836 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19"} err="failed to get container status \"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19\": rpc error: code = NotFound desc = could not find container \"285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19\": container with ID starting with 285ed8630430165f2846d4785609bf6755cd1bd15268c3760eee05f932ae7b19 not 
found: ID does not exist" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.032851 4603 scope.go:117] "RemoveContainer" containerID="cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1" Sep 30 20:11:59 crc kubenswrapper[4603]: E0930 20:11:59.033289 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1\": container with ID starting with cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1 not found: ID does not exist" containerID="cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.033325 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1"} err="failed to get container status \"cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1\": rpc error: code = NotFound desc = could not find container \"cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1\": container with ID starting with cc875d1c60a042cb81add82868ba36a03b2c2449c72d9ba7ab45759adde3f8e1 not found: ID does not exist" Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.918732 4603 generic.go:334] "Generic (PLEG): container finished" podID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerID="2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc" exitCode=0 Sep 30 20:11:59 crc kubenswrapper[4603]: I0930 20:11:59.918853 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerDied","Data":"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc"} Sep 30 20:12:00 crc kubenswrapper[4603]: I0930 20:12:00.782159 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" path="/var/lib/kubelet/pods/5b05d511-cecc-4844-b0df-78124d6823a7/volumes" Sep 30 20:12:01 crc kubenswrapper[4603]: I0930 20:12:01.978087 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerStarted","Data":"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f"} Sep 30 20:12:01 crc kubenswrapper[4603]: I0930 20:12:01.997078 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6qz88" podStartSLOduration=2.8920529950000002 podStartE2EDuration="6.997061041s" podCreationTimestamp="2025-09-30 20:11:55 +0000 UTC" firstStartedPulling="2025-09-30 20:11:56.874133168 +0000 UTC m=+1518.812591986" lastFinishedPulling="2025-09-30 20:12:00.979141214 +0000 UTC m=+1522.917600032" observedRunningTime="2025-09-30 20:12:01.995480338 +0000 UTC m=+1523.933939166" watchObservedRunningTime="2025-09-30 20:12:01.997061041 +0000 UTC m=+1523.935519859" Sep 30 20:12:02 crc kubenswrapper[4603]: I0930 20:12:02.764424 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:12:02 crc kubenswrapper[4603]: E0930 20:12:02.765127 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:12:05 crc kubenswrapper[4603]: I0930 20:12:05.735135 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:05 crc kubenswrapper[4603]: I0930 20:12:05.737250 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:06 crc kubenswrapper[4603]: I0930 20:12:06.791264 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-6qz88" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="registry-server" probeResult="failure" output=< Sep 30 20:12:06 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:12:06 crc kubenswrapper[4603]: > Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.923073 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:09 crc kubenswrapper[4603]: E0930 20:12:09.923970 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="registry-server" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.923984 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="registry-server" Sep 30 20:12:09 crc kubenswrapper[4603]: E0930 20:12:09.923999 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="extract-content" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.924005 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="extract-content" Sep 30 20:12:09 crc kubenswrapper[4603]: E0930 20:12:09.924018 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="extract-utilities" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.924026 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="extract-utilities" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.924232 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b05d511-cecc-4844-b0df-78124d6823a7" containerName="registry-server" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.928610 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:09 crc kubenswrapper[4603]: I0930 20:12:09.942239 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.002376 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-868tl\" (UniqueName: \"kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.002434 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.002554 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.104100 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.104405 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-868tl\" (UniqueName: \"kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.104447 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.104557 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.105138 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.128548 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-868tl\" (UniqueName: \"kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl\") pod \"redhat-marketplace-jjgmz\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.248594 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:10 crc kubenswrapper[4603]: I0930 20:12:10.773997 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:10 crc kubenswrapper[4603]: W0930 20:12:10.781300 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0a69451_8454_4635_a645_f8672059a224.slice/crio-4ef427dfea383b88b924f8beaa6d74cab819b5118b5053ddab9541ed9a1ef31e WatchSource:0}: Error finding container 4ef427dfea383b88b924f8beaa6d74cab819b5118b5053ddab9541ed9a1ef31e: Status 404 returned error can't find the container with id 4ef427dfea383b88b924f8beaa6d74cab819b5118b5053ddab9541ed9a1ef31e Sep 30 20:12:11 crc kubenswrapper[4603]: I0930 20:12:11.082639 4603 generic.go:334] "Generic (PLEG): container finished" podID="a0a69451-8454-4635-a645-f8672059a224" containerID="b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23" exitCode=0 Sep 30 20:12:11 crc kubenswrapper[4603]: I0930 20:12:11.082709 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerDied","Data":"b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23"} Sep 30 20:12:11 crc kubenswrapper[4603]: I0930 20:12:11.082756 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerStarted","Data":"4ef427dfea383b88b924f8beaa6d74cab819b5118b5053ddab9541ed9a1ef31e"} Sep 30 20:12:12 crc kubenswrapper[4603]: I0930 20:12:12.094092 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerStarted","Data":"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be"} Sep 30 20:12:13 crc kubenswrapper[4603]: I0930 20:12:13.105692 4603 generic.go:334] "Generic (PLEG): container finished" podID="a0a69451-8454-4635-a645-f8672059a224" containerID="801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be" exitCode=0 Sep 30 20:12:13 crc kubenswrapper[4603]: I0930 20:12:13.105738 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerDied","Data":"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be"} Sep 30 20:12:14 crc kubenswrapper[4603]: I0930 20:12:14.117400 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerStarted","Data":"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee"} Sep 30 20:12:14 crc kubenswrapper[4603]: I0930 20:12:14.142588 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jjgmz" podStartSLOduration=2.705324911 
podStartE2EDuration="5.14256173s" podCreationTimestamp="2025-09-30 20:12:09 +0000 UTC" firstStartedPulling="2025-09-30 20:12:11.08570049 +0000 UTC m=+1533.024159308" lastFinishedPulling="2025-09-30 20:12:13.522937309 +0000 UTC m=+1535.461396127" observedRunningTime="2025-09-30 20:12:14.134131206 +0000 UTC m=+1536.072590034" watchObservedRunningTime="2025-09-30 20:12:14.14256173 +0000 UTC m=+1536.081020548" Sep 30 20:12:15 crc kubenswrapper[4603]: I0930 20:12:15.814772 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:15 crc kubenswrapper[4603]: I0930 20:12:15.887834 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.295318 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6qz88"] Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.295874 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6qz88" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="registry-server" containerID="cri-o://5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f" gracePeriod=2 Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.751548 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.763895 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:12:17 crc kubenswrapper[4603]: E0930 20:12:17.764232 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.857149 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities\") pod \"af45e21d-fc9b-4a25-beaf-901168bf78da\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.857327 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content\") pod \"af45e21d-fc9b-4a25-beaf-901168bf78da\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.857392 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvpsq\" (UniqueName: \"kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq\") pod \"af45e21d-fc9b-4a25-beaf-901168bf78da\" (UID: \"af45e21d-fc9b-4a25-beaf-901168bf78da\") " Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.857985 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities" (OuterVolumeSpecName: "utilities") pod 
"af45e21d-fc9b-4a25-beaf-901168bf78da" (UID: "af45e21d-fc9b-4a25-beaf-901168bf78da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.862868 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq" (OuterVolumeSpecName: "kube-api-access-pvpsq") pod "af45e21d-fc9b-4a25-beaf-901168bf78da" (UID: "af45e21d-fc9b-4a25-beaf-901168bf78da"). InnerVolumeSpecName "kube-api-access-pvpsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.898005 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af45e21d-fc9b-4a25-beaf-901168bf78da" (UID: "af45e21d-fc9b-4a25-beaf-901168bf78da"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.960666 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.960705 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af45e21d-fc9b-4a25-beaf-901168bf78da-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:17 crc kubenswrapper[4603]: I0930 20:12:17.960720 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvpsq\" (UniqueName: \"kubernetes.io/projected/af45e21d-fc9b-4a25-beaf-901168bf78da-kube-api-access-pvpsq\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.154938 4603 generic.go:334] "Generic (PLEG): container finished" podID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerID="5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f" exitCode=0 Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.154978 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerDied","Data":"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f"} Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.155025 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6qz88" event={"ID":"af45e21d-fc9b-4a25-beaf-901168bf78da","Type":"ContainerDied","Data":"a2c4626d6d4238a7d05fee2c10ebd4e281ef12277f73a051e3a6d858dbed6efe"} Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.155031 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6qz88" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.155042 4603 scope.go:117] "RemoveContainer" containerID="5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.197133 4603 scope.go:117] "RemoveContainer" containerID="2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.221006 4603 scope.go:117] "RemoveContainer" containerID="6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.257211 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6qz88"] Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.265550 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6qz88"] Sep 30 20:12:18 crc kubenswrapper[4603]: E0930 20:12:18.282355 4603 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf45e21d_fc9b_4a25_beaf_901168bf78da.slice/crio-a2c4626d6d4238a7d05fee2c10ebd4e281ef12277f73a051e3a6d858dbed6efe\": RecentStats: unable to find data in memory cache]" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.298526 4603 scope.go:117] "RemoveContainer" containerID="5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f" Sep 30 20:12:18 crc kubenswrapper[4603]: E0930 20:12:18.299651 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f\": container with ID starting with 5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f not found: ID does not exist" containerID="5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.299685 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f"} err="failed to get container status \"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f\": rpc error: code = NotFound desc = could not find container \"5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f\": container with ID starting with 5b6e74e390dc319c4402bee86c27f7063cdcde26f722d07970e0a52057cf170f not found: ID does not exist" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.299706 4603 scope.go:117] "RemoveContainer" containerID="2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc" Sep 30 20:12:18 crc kubenswrapper[4603]: E0930 20:12:18.299963 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc\": container with ID starting with 2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc not found: ID does not exist" containerID="2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.299981 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc"} err="failed to get container status 
\"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc\": rpc error: code = NotFound desc = could not find container \"2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc\": container with ID starting with 2b2500131ad9251c7a809823db3e90a61e373fad44ceb787df531d0004bff7cc not found: ID does not exist" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.299992 4603 scope.go:117] "RemoveContainer" containerID="6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3" Sep 30 20:12:18 crc kubenswrapper[4603]: E0930 20:12:18.300257 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3\": container with ID starting with 6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3 not found: ID does not exist" containerID="6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.300275 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3"} err="failed to get container status \"6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3\": rpc error: code = NotFound desc = could not find container \"6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3\": container with ID starting with 6e84070419097f17bf962109687dfd2324c623e6b612c414d77f1a6de803ebd3 not found: ID does not exist" Sep 30 20:12:18 crc kubenswrapper[4603]: I0930 20:12:18.778609 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" path="/var/lib/kubelet/pods/af45e21d-fc9b-4a25-beaf-901168bf78da/volumes" Sep 30 20:12:20 crc kubenswrapper[4603]: I0930 20:12:20.248787 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:20 crc kubenswrapper[4603]: I0930 20:12:20.249160 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:20 crc kubenswrapper[4603]: I0930 20:12:20.331856 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:21 crc kubenswrapper[4603]: I0930 20:12:21.234047 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:21 crc kubenswrapper[4603]: I0930 20:12:21.502652 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.199469 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jjgmz" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="registry-server" containerID="cri-o://416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee" gracePeriod=2 Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.659187 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.781330 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content\") pod \"a0a69451-8454-4635-a645-f8672059a224\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.781372 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities\") pod \"a0a69451-8454-4635-a645-f8672059a224\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.781415 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-868tl\" (UniqueName: \"kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl\") pod \"a0a69451-8454-4635-a645-f8672059a224\" (UID: \"a0a69451-8454-4635-a645-f8672059a224\") " Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.782185 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities" (OuterVolumeSpecName: "utilities") pod "a0a69451-8454-4635-a645-f8672059a224" (UID: "a0a69451-8454-4635-a645-f8672059a224"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.787220 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl" (OuterVolumeSpecName: "kube-api-access-868tl") pod "a0a69451-8454-4635-a645-f8672059a224" (UID: "a0a69451-8454-4635-a645-f8672059a224"). InnerVolumeSpecName "kube-api-access-868tl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.797605 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0a69451-8454-4635-a645-f8672059a224" (UID: "a0a69451-8454-4635-a645-f8672059a224"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.883074 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.883100 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0a69451-8454-4635-a645-f8672059a224-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:23 crc kubenswrapper[4603]: I0930 20:12:23.883109 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-868tl\" (UniqueName: \"kubernetes.io/projected/a0a69451-8454-4635-a645-f8672059a224-kube-api-access-868tl\") on node \"crc\" DevicePath \"\"" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.210923 4603 generic.go:334] "Generic (PLEG): container finished" podID="a0a69451-8454-4635-a645-f8672059a224" containerID="416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee" exitCode=0 Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.210981 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerDied","Data":"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee"} Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.210985 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jjgmz" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.211007 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jjgmz" event={"ID":"a0a69451-8454-4635-a645-f8672059a224","Type":"ContainerDied","Data":"4ef427dfea383b88b924f8beaa6d74cab819b5118b5053ddab9541ed9a1ef31e"} Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.211026 4603 scope.go:117] "RemoveContainer" containerID="416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.251122 4603 scope.go:117] "RemoveContainer" containerID="801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.258254 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.270319 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jjgmz"] Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.271283 4603 scope.go:117] "RemoveContainer" containerID="b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.320443 4603 scope.go:117] "RemoveContainer" containerID="416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee" Sep 30 20:12:24 crc kubenswrapper[4603]: E0930 20:12:24.320835 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee\": container with ID starting with 416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee not found: ID does not exist" containerID="416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.320871 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee"} err="failed to get container status \"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee\": rpc error: code = NotFound desc = could not find container \"416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee\": container with ID starting with 416af7e1974932f0f3687f97fde9090f1bb5a947315832216f7d1d407dacd0ee not found: ID does not exist" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.320897 4603 scope.go:117] "RemoveContainer" containerID="801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be" Sep 30 20:12:24 crc kubenswrapper[4603]: E0930 20:12:24.321120 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be\": container with ID starting with 801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be not found: ID does not exist" containerID="801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.321154 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be"} err="failed to get container status \"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be\": rpc error: code = NotFound desc = could not find container \"801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be\": container with ID starting with 801904777de9d3586fee4a0477b866b29ac453ea9f24ac5727cbc7f3b96869be not found: ID does not exist" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.321257 4603 scope.go:117] "RemoveContainer" containerID="b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23" Sep 30 20:12:24 crc kubenswrapper[4603]: E0930 20:12:24.321515 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23\": container with ID starting with b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23 not found: ID does not exist" containerID="b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.321542 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23"} err="failed to get container status \"b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23\": rpc error: code = NotFound desc = could not find container \"b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23\": container with ID starting with b33b05f69d86ef07f96dd206b92745a8ea14d8b632c03a183f62d2aed1c93e23 not found: ID does not exist" Sep 30 20:12:24 crc kubenswrapper[4603]: I0930 20:12:24.779692 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a69451-8454-4635-a645-f8672059a224" path="/var/lib/kubelet/pods/a0a69451-8454-4635-a645-f8672059a224/volumes" Sep 30 20:12:30 crc kubenswrapper[4603]: I0930 20:12:30.765264 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:12:30 crc kubenswrapper[4603]: E0930 20:12:30.766230 4603 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:12:42 crc kubenswrapper[4603]: I0930 20:12:42.766010 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:12:42 crc kubenswrapper[4603]: E0930 20:12:42.766770 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:12:54 crc kubenswrapper[4603]: I0930 20:12:54.764536 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:12:54 crc kubenswrapper[4603]: E0930 20:12:54.765644 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:13:06 crc kubenswrapper[4603]: I0930 20:13:06.765257 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:13:06 crc kubenswrapper[4603]: E0930 20:13:06.766977 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:13:17 crc kubenswrapper[4603]: I0930 20:13:17.764292 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:13:17 crc kubenswrapper[4603]: E0930 20:13:17.765036 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:13:32 crc kubenswrapper[4603]: I0930 20:13:32.764955 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:13:32 crc kubenswrapper[4603]: E0930 20:13:32.766392 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:13:46 crc kubenswrapper[4603]: I0930 20:13:46.764546 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:13:46 crc kubenswrapper[4603]: E0930 20:13:46.765147 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:13:58 crc kubenswrapper[4603]: I0930 20:13:58.773302 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:13:58 crc kubenswrapper[4603]: E0930 20:13:58.774992 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:14:12 crc kubenswrapper[4603]: I0930 20:14:12.765178 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:14:12 crc kubenswrapper[4603]: E0930 20:14:12.765812 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:14:27 crc kubenswrapper[4603]: I0930 20:14:27.765347 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:14:27 crc kubenswrapper[4603]: E0930 20:14:27.766393 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:14:41 crc kubenswrapper[4603]: I0930 20:14:41.764422 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:14:41 crc kubenswrapper[4603]: E0930 20:14:41.765195 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" 
podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:14:52 crc kubenswrapper[4603]: I0930 20:14:52.064043 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-9wjnk"] Sep 30 20:14:52 crc kubenswrapper[4603]: I0930 20:14:52.080967 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-9wjnk"] Sep 30 20:14:52 crc kubenswrapper[4603]: I0930 20:14:52.781630 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb23a0b8-7639-452d-8c51-07b383706c82" path="/var/lib/kubelet/pods/bb23a0b8-7639-452d-8c51-07b383706c82/volumes" Sep 30 20:14:53 crc kubenswrapper[4603]: I0930 20:14:53.033340 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-fswsp"] Sep 30 20:14:53 crc kubenswrapper[4603]: I0930 20:14:53.039684 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-fswsp"] Sep 30 20:14:53 crc kubenswrapper[4603]: I0930 20:14:53.049575 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-9rbkg"] Sep 30 20:14:53 crc kubenswrapper[4603]: I0930 20:14:53.059714 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-9rbkg"] Sep 30 20:14:53 crc kubenswrapper[4603]: I0930 20:14:53.764188 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:14:53 crc kubenswrapper[4603]: E0930 20:14:53.764560 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:14:54 crc kubenswrapper[4603]: I0930 20:14:54.778814 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89aa30a7-1e3a-4413-921b-7cf759e4396a" path="/var/lib/kubelet/pods/89aa30a7-1e3a-4413-921b-7cf759e4396a/volumes" Sep 30 20:14:54 crc kubenswrapper[4603]: I0930 20:14:54.780249 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c78450b9-5e4b-4193-b593-a3d4d19f644a" path="/var/lib/kubelet/pods/c78450b9-5e4b-4193-b593-a3d4d19f644a/volumes" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.170447 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52"] Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172326 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172360 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172398 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172449 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172493 4603 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172512 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="extract-utilities" Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172558 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172574 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172600 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172614 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: E0930 20:15:00.172636 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172648 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="extract-content" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.172961 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="af45e21d-fc9b-4a25-beaf-901168bf78da" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.173021 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a69451-8454-4635-a645-f8672059a224" containerName="registry-server" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.174134 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.177695 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.178337 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.187949 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52"] Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.250738 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c6kp\" (UniqueName: \"kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.250814 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.250880 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.353789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c6kp\" (UniqueName: \"kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.353868 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.353920 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.354913 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume\") pod 
\"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.374743 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.384061 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c6kp\" (UniqueName: \"kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp\") pod \"collect-profiles-29321055-5cz52\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.510462 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:00 crc kubenswrapper[4603]: I0930 20:15:00.976236 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52"] Sep 30 20:15:01 crc kubenswrapper[4603]: I0930 20:15:01.934577 4603 generic.go:334] "Generic (PLEG): container finished" podID="401333a8-8cdd-499e-a656-823b0fdc0828" containerID="76e2bf7601242fda162a65fd9b0412430835ddae5d79faad9b70af423d3546fa" exitCode=0 Sep 30 20:15:01 crc kubenswrapper[4603]: I0930 20:15:01.934830 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" event={"ID":"401333a8-8cdd-499e-a656-823b0fdc0828","Type":"ContainerDied","Data":"76e2bf7601242fda162a65fd9b0412430835ddae5d79faad9b70af423d3546fa"} Sep 30 20:15:01 crc kubenswrapper[4603]: I0930 20:15:01.934860 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" event={"ID":"401333a8-8cdd-499e-a656-823b0fdc0828","Type":"ContainerStarted","Data":"e2115f4ded83e065ac59c9ba25f17d35531fdadda3450f1de5ec6500cb951945"} Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.047689 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-90e2-account-create-782q5"] Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.062029 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-90e2-account-create-782q5"] Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.079945 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-f373-account-create-gp9pr"] Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.091425 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-f373-account-create-gp9pr"] Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.260098 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.421733 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume\") pod \"401333a8-8cdd-499e-a656-823b0fdc0828\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.421792 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c6kp\" (UniqueName: \"kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp\") pod \"401333a8-8cdd-499e-a656-823b0fdc0828\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.421965 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume\") pod \"401333a8-8cdd-499e-a656-823b0fdc0828\" (UID: \"401333a8-8cdd-499e-a656-823b0fdc0828\") " Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.422748 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume" (OuterVolumeSpecName: "config-volume") pod "401333a8-8cdd-499e-a656-823b0fdc0828" (UID: "401333a8-8cdd-499e-a656-823b0fdc0828"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.427348 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "401333a8-8cdd-499e-a656-823b0fdc0828" (UID: "401333a8-8cdd-499e-a656-823b0fdc0828"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.436482 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp" (OuterVolumeSpecName: "kube-api-access-5c6kp") pod "401333a8-8cdd-499e-a656-823b0fdc0828" (UID: "401333a8-8cdd-499e-a656-823b0fdc0828"). InnerVolumeSpecName "kube-api-access-5c6kp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.524449 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/401333a8-8cdd-499e-a656-823b0fdc0828-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.524490 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/401333a8-8cdd-499e-a656-823b0fdc0828-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.524505 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c6kp\" (UniqueName: \"kubernetes.io/projected/401333a8-8cdd-499e-a656-823b0fdc0828-kube-api-access-5c6kp\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.956066 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" event={"ID":"401333a8-8cdd-499e-a656-823b0fdc0828","Type":"ContainerDied","Data":"e2115f4ded83e065ac59c9ba25f17d35531fdadda3450f1de5ec6500cb951945"} Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.956105 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2115f4ded83e065ac59c9ba25f17d35531fdadda3450f1de5ec6500cb951945" Sep 30 20:15:03 crc kubenswrapper[4603]: I0930 20:15:03.956204 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52" Sep 30 20:15:04 crc kubenswrapper[4603]: I0930 20:15:04.031077 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-f8e9-account-create-7pvts"] Sep 30 20:15:04 crc kubenswrapper[4603]: I0930 20:15:04.041403 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-f8e9-account-create-7pvts"] Sep 30 20:15:04 crc kubenswrapper[4603]: I0930 20:15:04.774436 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05206bfc-d8c0-4584-99d7-e6db6a05c14e" path="/var/lib/kubelet/pods/05206bfc-d8c0-4584-99d7-e6db6a05c14e/volumes" Sep 30 20:15:04 crc kubenswrapper[4603]: I0930 20:15:04.776180 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10f773b8-d62b-4ef7-94c6-3c264a0428c0" path="/var/lib/kubelet/pods/10f773b8-d62b-4ef7-94c6-3c264a0428c0/volumes" Sep 30 20:15:04 crc kubenswrapper[4603]: I0930 20:15:04.777571 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="877f3d6c-8f86-4b20-b6c1-0fffb1668924" path="/var/lib/kubelet/pods/877f3d6c-8f86-4b20-b6c1-0fffb1668924/volumes" Sep 30 20:15:05 crc kubenswrapper[4603]: I0930 20:15:05.764603 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:15:05 crc kubenswrapper[4603]: E0930 20:15:05.765088 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:15:10 crc kubenswrapper[4603]: I0930 20:15:10.024385 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-db-create-swff5"] Sep 30 20:15:10 crc kubenswrapper[4603]: I0930 20:15:10.034000 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-swff5"] Sep 30 20:15:10 crc kubenswrapper[4603]: I0930 20:15:10.777304 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2023d167-8ee7-4bdc-ad0e-763b76c6a1d7" path="/var/lib/kubelet/pods/2023d167-8ee7-4bdc-ad0e-763b76c6a1d7/volumes" Sep 30 20:15:11 crc kubenswrapper[4603]: I0930 20:15:11.043353 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-z7jg7"] Sep 30 20:15:11 crc kubenswrapper[4603]: I0930 20:15:11.057274 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-f2p2m"] Sep 30 20:15:11 crc kubenswrapper[4603]: I0930 20:15:11.068736 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-f2p2m"] Sep 30 20:15:11 crc kubenswrapper[4603]: I0930 20:15:11.079515 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-z7jg7"] Sep 30 20:15:12 crc kubenswrapper[4603]: I0930 20:15:12.776346 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d5951d8-7653-4312-978c-f31522c6c282" path="/var/lib/kubelet/pods/4d5951d8-7653-4312-978c-f31522c6c282/volumes" Sep 30 20:15:12 crc kubenswrapper[4603]: I0930 20:15:12.777976 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="799d29f7-1992-4c4a-8f3e-dd18927238a2" path="/var/lib/kubelet/pods/799d29f7-1992-4c4a-8f3e-dd18927238a2/volumes" Sep 30 20:15:14 crc kubenswrapper[4603]: I0930 20:15:14.045731 4603 generic.go:334] "Generic (PLEG): container finished" podID="d826ad98-bfbb-4355-b0a2-c7ea9715b990" containerID="68c2c87a36caa9b0f6c75b5c0f0e7d7a0b8614ac29674361de6514a4d4b9e2fd" exitCode=0 Sep 30 20:15:14 crc kubenswrapper[4603]: I0930 20:15:14.045784 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" event={"ID":"d826ad98-bfbb-4355-b0a2-c7ea9715b990","Type":"ContainerDied","Data":"68c2c87a36caa9b0f6c75b5c0f0e7d7a0b8614ac29674361de6514a4d4b9e2fd"} Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.451435 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.544784 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory\") pod \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.544860 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m9nt\" (UniqueName: \"kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt\") pod \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.544950 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key\") pod \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.545134 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle\") pod \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\" (UID: \"d826ad98-bfbb-4355-b0a2-c7ea9715b990\") " Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.551994 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d826ad98-bfbb-4355-b0a2-c7ea9715b990" (UID: "d826ad98-bfbb-4355-b0a2-c7ea9715b990"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.556061 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt" (OuterVolumeSpecName: "kube-api-access-4m9nt") pod "d826ad98-bfbb-4355-b0a2-c7ea9715b990" (UID: "d826ad98-bfbb-4355-b0a2-c7ea9715b990"). InnerVolumeSpecName "kube-api-access-4m9nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.582439 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d826ad98-bfbb-4355-b0a2-c7ea9715b990" (UID: "d826ad98-bfbb-4355-b0a2-c7ea9715b990"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.615374 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory" (OuterVolumeSpecName: "inventory") pod "d826ad98-bfbb-4355-b0a2-c7ea9715b990" (UID: "d826ad98-bfbb-4355-b0a2-c7ea9715b990"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.647311 4603 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.647342 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.647352 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m9nt\" (UniqueName: \"kubernetes.io/projected/d826ad98-bfbb-4355-b0a2-c7ea9715b990-kube-api-access-4m9nt\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:15 crc kubenswrapper[4603]: I0930 20:15:15.647362 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d826ad98-bfbb-4355-b0a2-c7ea9715b990-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.071206 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" event={"ID":"d826ad98-bfbb-4355-b0a2-c7ea9715b990","Type":"ContainerDied","Data":"62144a64357d04b93f60e7996e13dd3fa8980f84f3e9e982311f85e0776e6cc5"} Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.071241 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62144a64357d04b93f60e7996e13dd3fa8980f84f3e9e982311f85e0776e6cc5" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.071299 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.173358 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss"] Sep 30 20:15:16 crc kubenswrapper[4603]: E0930 20:15:16.174377 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="401333a8-8cdd-499e-a656-823b0fdc0828" containerName="collect-profiles" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.174517 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="401333a8-8cdd-499e-a656-823b0fdc0828" containerName="collect-profiles" Sep 30 20:15:16 crc kubenswrapper[4603]: E0930 20:15:16.174625 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d826ad98-bfbb-4355-b0a2-c7ea9715b990" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.174700 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d826ad98-bfbb-4355-b0a2-c7ea9715b990" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.175255 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d826ad98-bfbb-4355-b0a2-c7ea9715b990" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.175405 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="401333a8-8cdd-499e-a656-823b0fdc0828" containerName="collect-profiles" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.176358 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.178461 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.179137 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.179415 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.179545 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.198911 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss"] Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.364601 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.364682 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.364753 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lvtt\" (UniqueName: \"kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.466644 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.466962 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.467038 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lvtt\" (UniqueName: \"kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.473724 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.474320 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.485917 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lvtt\" (UniqueName: \"kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-nrkss\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:16 crc kubenswrapper[4603]: I0930 20:15:16.492450 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:15:17 crc kubenswrapper[4603]: I0930 20:15:17.026834 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss"] Sep 30 20:15:17 crc kubenswrapper[4603]: W0930 20:15:17.033460 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71fad0dd_a3d8_42b4_ab00_d98aa7368c5f.slice/crio-777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6 WatchSource:0}: Error finding container 777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6: Status 404 returned error can't find the container with id 777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6 Sep 30 20:15:17 crc kubenswrapper[4603]: I0930 20:15:17.036408 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:15:17 crc kubenswrapper[4603]: I0930 20:15:17.081447 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" event={"ID":"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f","Type":"ContainerStarted","Data":"777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6"} Sep 30 20:15:17 crc kubenswrapper[4603]: I0930 20:15:17.764215 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:15:17 crc kubenswrapper[4603]: E0930 20:15:17.765635 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:15:18 crc kubenswrapper[4603]: I0930 20:15:18.090154 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" event={"ID":"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f","Type":"ContainerStarted","Data":"d162b65499b013d85502dac0be297f9aee5a2a27966f5982f37eb198c5659aa8"} Sep 30 20:15:18 crc kubenswrapper[4603]: I0930 20:15:18.110012 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" podStartSLOduration=1.8945200180000001 podStartE2EDuration="2.109991949s" podCreationTimestamp="2025-09-30 20:15:16 +0000 UTC" firstStartedPulling="2025-09-30 20:15:17.036116181 +0000 UTC m=+1718.974574999" lastFinishedPulling="2025-09-30 20:15:17.251588122 +0000 UTC m=+1719.190046930" observedRunningTime="2025-09-30 20:15:18.103798607 +0000 UTC m=+1720.042257425" watchObservedRunningTime="2025-09-30 20:15:18.109991949 +0000 UTC m=+1720.048450767" Sep 30 20:15:27 crc kubenswrapper[4603]: I0930 20:15:27.046675 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-df76-account-create-9qz69"] Sep 30 20:15:27 crc kubenswrapper[4603]: I0930 20:15:27.055827 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-df76-account-create-9qz69"] Sep 30 20:15:28 crc kubenswrapper[4603]: I0930 20:15:28.783633 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03487c1c-ba57-440c-a854-1c0238e51bfb" path="/var/lib/kubelet/pods/03487c1c-ba57-440c-a854-1c0238e51bfb/volumes" Sep 30 20:15:29 crc kubenswrapper[4603]: I0930 20:15:29.764127 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:15:29 crc kubenswrapper[4603]: E0930 20:15:29.764483 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.030261 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7690-account-create-hfkfp"] Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.041629 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7690-account-create-hfkfp"] Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.053288 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0f43-account-create-cl6t4"] Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.062697 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0f43-account-create-cl6t4"] Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.783819 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c26e760-1c76-4f97-9df8-101bdf01d8ec" path="/var/lib/kubelet/pods/2c26e760-1c76-4f97-9df8-101bdf01d8ec/volumes" Sep 30 20:15:30 crc kubenswrapper[4603]: I0930 20:15:30.787087 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cef5d2f6-4ebf-4473-9e7e-87904a61af9b" path="/var/lib/kubelet/pods/cef5d2f6-4ebf-4473-9e7e-87904a61af9b/volumes" 
Sep 30 20:15:36 crc kubenswrapper[4603]: I0930 20:15:36.029228 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-7rprt"] Sep 30 20:15:36 crc kubenswrapper[4603]: I0930 20:15:36.045117 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-7rprt"] Sep 30 20:15:36 crc kubenswrapper[4603]: I0930 20:15:36.777409 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b221670d-0e9e-427c-845c-7c00c4566e64" path="/var/lib/kubelet/pods/b221670d-0e9e-427c-845c-7c00c4566e64/volumes" Sep 30 20:15:43 crc kubenswrapper[4603]: I0930 20:15:43.764560 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:15:43 crc kubenswrapper[4603]: E0930 20:15:43.765665 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.504659 4603 scope.go:117] "RemoveContainer" containerID="f7081fc457ee52b17531bbe0dfc3ae102d4c46a12f943a21c15e29f118e0ab72" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.562302 4603 scope.go:117] "RemoveContainer" containerID="23b250af8878a8aa2fc8ed928c780307176aa9f4310608076f6f1831cd7c9592" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.597185 4603 scope.go:117] "RemoveContainer" containerID="8ea5bd3f41ee547f776496ed2d7f22bc68225392c89bd8691d53d592eae101c2" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.639235 4603 scope.go:117] "RemoveContainer" containerID="479e2006b025369985ea9ae04e36043299cae4ad887e69d513c56e1c3dd76224" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.686541 4603 scope.go:117] "RemoveContainer" containerID="b623828695c337f7af1c8501565cced4a898d473cfde8f3bdcbf3ddfe9e68048" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.723447 4603 scope.go:117] "RemoveContainer" containerID="55a91bbb098b668967de61f6bed823ab586b2624a7ff7310a788f38266e65468" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.760126 4603 scope.go:117] "RemoveContainer" containerID="9bf80d0bd69f0f7e471f0b5e97d11a9aab5218d47c3051d86703affcc071bcfb" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.793387 4603 scope.go:117] "RemoveContainer" containerID="650b3a5f2c5d23a260cc4071fbb5e31f930a45034cf67466a19f1cb82e9ab21c" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.814015 4603 scope.go:117] "RemoveContainer" containerID="b705fe0525130499ba74e114a6078011fd8a5c91df23767b36376d71764e5fc4" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.833132 4603 scope.go:117] "RemoveContainer" containerID="2e7751a8c6b3c137f2a4a924250b8e90584210c76255eedd69a0a56e29d0e60d" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.855411 4603 scope.go:117] "RemoveContainer" containerID="8e3dd8175de389fc27b0ee35ee462aa0e6b8558a05cb30f503c979cecc90f946" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.875659 4603 scope.go:117] "RemoveContainer" containerID="2091078eb8571c9e7f9ed81d4647dfbcf7256a20de91c5638b8431017b65934c" Sep 30 20:15:44 crc kubenswrapper[4603]: I0930 20:15:44.895077 4603 scope.go:117] "RemoveContainer" 
containerID="bfaedb4545f6258ff662427fa9480cf705af306ace442c65872a6f86c7e3b73d" Sep 30 20:15:55 crc kubenswrapper[4603]: I0930 20:15:55.764901 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:15:55 crc kubenswrapper[4603]: E0930 20:15:55.766269 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:16:07 crc kubenswrapper[4603]: I0930 20:16:07.764643 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:16:07 crc kubenswrapper[4603]: E0930 20:16:07.766379 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:16:21 crc kubenswrapper[4603]: I0930 20:16:21.765826 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:16:21 crc kubenswrapper[4603]: E0930 20:16:21.767242 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:16:26 crc kubenswrapper[4603]: I0930 20:16:26.073877 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-gb2nj"] Sep 30 20:16:26 crc kubenswrapper[4603]: I0930 20:16:26.089045 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-gb2nj"] Sep 30 20:16:26 crc kubenswrapper[4603]: I0930 20:16:26.784970 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4255d3b6-48b0-4a39-8991-bd70191f02ee" path="/var/lib/kubelet/pods/4255d3b6-48b0-4a39-8991-bd70191f02ee/volumes" Sep 30 20:16:36 crc kubenswrapper[4603]: I0930 20:16:36.764902 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:16:36 crc kubenswrapper[4603]: E0930 20:16:36.765748 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:16:37 crc kubenswrapper[4603]: I0930 20:16:37.047534 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-2m748"] Sep 30 20:16:37 crc kubenswrapper[4603]: I0930 20:16:37.055654 
4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-2m748"] Sep 30 20:16:38 crc kubenswrapper[4603]: I0930 20:16:38.045754 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9k8jw"] Sep 30 20:16:38 crc kubenswrapper[4603]: I0930 20:16:38.062234 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9k8jw"] Sep 30 20:16:38 crc kubenswrapper[4603]: I0930 20:16:38.796460 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f561a20-3add-4fea-88e3-15e16af5d2d3" path="/var/lib/kubelet/pods/9f561a20-3add-4fea-88e3-15e16af5d2d3/volumes" Sep 30 20:16:38 crc kubenswrapper[4603]: I0930 20:16:38.799622 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be703fd8-b3d7-4462-a905-5a835f8e2125" path="/var/lib/kubelet/pods/be703fd8-b3d7-4462-a905-5a835f8e2125/volumes" Sep 30 20:16:45 crc kubenswrapper[4603]: I0930 20:16:45.117669 4603 scope.go:117] "RemoveContainer" containerID="fff0e3ddc8c18a9925865d4dfd15879c17f595cdf49f1421d2851e4e0734e809" Sep 30 20:16:45 crc kubenswrapper[4603]: I0930 20:16:45.179186 4603 scope.go:117] "RemoveContainer" containerID="86a68997eaac56d0e2ed27db846e648bfd83959a62837c3b92915317593f148c" Sep 30 20:16:45 crc kubenswrapper[4603]: I0930 20:16:45.225464 4603 scope.go:117] "RemoveContainer" containerID="74677c69850fc7b0c850d1d10d905fcb3f41f9a9c3f0ce990525d54958d10128" Sep 30 20:16:51 crc kubenswrapper[4603]: I0930 20:16:51.765155 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:16:52 crc kubenswrapper[4603]: I0930 20:16:52.118089 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e"} Sep 30 20:16:57 crc kubenswrapper[4603]: I0930 20:16:57.052311 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-t94b6"] Sep 30 20:16:57 crc kubenswrapper[4603]: I0930 20:16:57.059543 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-t94b6"] Sep 30 20:16:58 crc kubenswrapper[4603]: I0930 20:16:58.029204 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-cgrnb"] Sep 30 20:16:58 crc kubenswrapper[4603]: I0930 20:16:58.036288 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-cgrnb"] Sep 30 20:16:58 crc kubenswrapper[4603]: I0930 20:16:58.779953 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49895f67-376c-4c08-9382-18aee2212e04" path="/var/lib/kubelet/pods/49895f67-376c-4c08-9382-18aee2212e04/volumes" Sep 30 20:16:58 crc kubenswrapper[4603]: I0930 20:16:58.782498 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a87079f-7cb1-447c-a950-bb204031afce" path="/var/lib/kubelet/pods/7a87079f-7cb1-447c-a950-bb204031afce/volumes" Sep 30 20:17:06 crc kubenswrapper[4603]: I0930 20:17:06.244962 4603 generic.go:334] "Generic (PLEG): container finished" podID="71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" containerID="d162b65499b013d85502dac0be297f9aee5a2a27966f5982f37eb198c5659aa8" exitCode=0 Sep 30 20:17:06 crc kubenswrapper[4603]: I0930 20:17:06.245092 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" 
event={"ID":"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f","Type":"ContainerDied","Data":"d162b65499b013d85502dac0be297f9aee5a2a27966f5982f37eb198c5659aa8"} Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.035394 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4265b"] Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.043610 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4265b"] Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.807202 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.835239 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key\") pod \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.835315 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory\") pod \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.835369 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lvtt\" (UniqueName: \"kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt\") pod \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\" (UID: \"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f\") " Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.845344 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt" (OuterVolumeSpecName: "kube-api-access-6lvtt") pod "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" (UID: "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f"). InnerVolumeSpecName "kube-api-access-6lvtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.866500 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory" (OuterVolumeSpecName: "inventory") pod "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" (UID: "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.887316 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" (UID: "71fad0dd-a3d8-42b4-ab00-d98aa7368c5f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.937806 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.937849 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4603]: I0930 20:17:07.937860 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lvtt\" (UniqueName: \"kubernetes.io/projected/71fad0dd-a3d8-42b4-ab00-d98aa7368c5f-kube-api-access-6lvtt\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.262442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" event={"ID":"71fad0dd-a3d8-42b4-ab00-d98aa7368c5f","Type":"ContainerDied","Data":"777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6"} Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.262479 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="777bf60efd3269b7cb64be4c1cc8260c6946db3934b182de16174b644d2db9e6" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.262534 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-nrkss" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.377686 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn"] Sep 30 20:17:08 crc kubenswrapper[4603]: E0930 20:17:08.378129 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.378152 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.378407 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="71fad0dd-a3d8-42b4-ab00-d98aa7368c5f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.379116 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.381929 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.382552 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.386325 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.388952 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.390323 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn"] Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.447159 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbdg7\" (UniqueName: \"kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.447384 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.447439 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.549404 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.549745 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.549796 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbdg7\" (UniqueName: \"kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.553790 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.553824 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.574501 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbdg7\" (UniqueName: \"kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-47wnn\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.710142 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:17:08 crc kubenswrapper[4603]: I0930 20:17:08.775437 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a86c5953-cb61-4f11-b581-eb7698adf7ec" path="/var/lib/kubelet/pods/a86c5953-cb61-4f11-b581-eb7698adf7ec/volumes" Sep 30 20:17:09 crc kubenswrapper[4603]: I0930 20:17:09.290192 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn"] Sep 30 20:17:09 crc kubenswrapper[4603]: W0930 20:17:09.296958 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf8365b2_113b_4c7b_8781_17cecdd6d3dd.slice/crio-780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42 WatchSource:0}: Error finding container 780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42: Status 404 returned error can't find the container with id 780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42 Sep 30 20:17:10 crc kubenswrapper[4603]: I0930 20:17:10.277656 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" event={"ID":"af8365b2-113b-4c7b-8781-17cecdd6d3dd","Type":"ContainerStarted","Data":"152813b06acf47582012f8cfede415304bd63ce119668079613c50d290676527"} Sep 30 20:17:10 crc kubenswrapper[4603]: I0930 20:17:10.277958 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" event={"ID":"af8365b2-113b-4c7b-8781-17cecdd6d3dd","Type":"ContainerStarted","Data":"780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42"} Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.039353 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" podStartSLOduration=32.900724479 podStartE2EDuration="33.039326353s" podCreationTimestamp="2025-09-30 20:17:08 +0000 UTC" firstStartedPulling="2025-09-30 20:17:09.299115488 +0000 UTC m=+1831.237574306" lastFinishedPulling="2025-09-30 20:17:09.437717362 +0000 UTC m=+1831.376176180" observedRunningTime="2025-09-30 20:17:10.298100351 +0000 UTC m=+1832.236559169" watchObservedRunningTime="2025-09-30 20:17:41.039326353 +0000 UTC m=+1862.977785211" Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.050352 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-hhjx8"] Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.059650 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-9nddn"] Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.075513 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-hhjx8"] Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.083490 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-pdg9l"] Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.091759 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-pdg9l"] Sep 30 20:17:41 crc kubenswrapper[4603]: I0930 20:17:41.100104 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-9nddn"] Sep 30 20:17:42 crc kubenswrapper[4603]: I0930 20:17:42.776929 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eea2891-5c31-482c-bb06-e829ee4348cd" path="/var/lib/kubelet/pods/2eea2891-5c31-482c-bb06-e829ee4348cd/volumes" Sep 30 20:17:42 crc kubenswrapper[4603]: I0930 20:17:42.777478 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f4f28e5-ceda-4147-9bb8-2f7c6142b78a" path="/var/lib/kubelet/pods/4f4f28e5-ceda-4147-9bb8-2f7c6142b78a/volumes" Sep 30 20:17:42 crc kubenswrapper[4603]: I0930 20:17:42.777962 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c01e2132-c5c6-4024-b8a8-38458b50eb14" path="/var/lib/kubelet/pods/c01e2132-c5c6-4024-b8a8-38458b50eb14/volumes" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.359974 4603 scope.go:117] "RemoveContainer" containerID="62522f9a75213475d390fbff44d7121abdbce5d6413f81455635c28fa06501e0" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.392156 4603 scope.go:117] "RemoveContainer" containerID="785a9a7fe6aa5b441c87196db57d1d4042df017621d17b20a4b96b1b0cb8362f" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.450694 4603 scope.go:117] "RemoveContainer" containerID="202e659da32a9e682700e9f14341f18f494de34a1c84c1b04b4984e931ad9ca9" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.489218 4603 scope.go:117] "RemoveContainer" containerID="84c7df746db3444cf2d62ebc0a0db4f41e236efaa9b46d776eb254476f881a9f" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.530911 4603 scope.go:117] "RemoveContainer" containerID="cd8b1600ab56800f68b8589a378a3aaf9ac503c0fde0f279b7a4ca8c3b1ea168" Sep 30 20:17:45 crc kubenswrapper[4603]: I0930 20:17:45.561181 4603 scope.go:117] "RemoveContainer" containerID="6af1b3a8e48f883f887e66087826e735d02918d6ba47bc2dc401412932551a41" Sep 30 20:17:54 crc kubenswrapper[4603]: I0930 20:17:54.034514 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5122-account-create-ps78f"] Sep 30 20:17:54 crc kubenswrapper[4603]: I0930 20:17:54.045153 4603 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5122-account-create-ps78f"] Sep 30 20:17:54 crc kubenswrapper[4603]: I0930 20:17:54.783260 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02e7f5a1-3809-4c70-8bfe-ac06acb64ac2" path="/var/lib/kubelet/pods/02e7f5a1-3809-4c70-8bfe-ac06acb64ac2/volumes" Sep 30 20:17:55 crc kubenswrapper[4603]: I0930 20:17:55.040476 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cba5-account-create-mr2xq"] Sep 30 20:17:55 crc kubenswrapper[4603]: I0930 20:17:55.047656 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-3142-account-create-2kf4s"] Sep 30 20:17:55 crc kubenswrapper[4603]: I0930 20:17:55.056119 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cba5-account-create-mr2xq"] Sep 30 20:17:55 crc kubenswrapper[4603]: I0930 20:17:55.064103 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3142-account-create-2kf4s"] Sep 30 20:17:56 crc kubenswrapper[4603]: I0930 20:17:56.794448 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0e45af4-c8e3-4dbd-94f3-3742b31b6e63" path="/var/lib/kubelet/pods/d0e45af4-c8e3-4dbd-94f3-3742b31b6e63/volumes" Sep 30 20:17:56 crc kubenswrapper[4603]: I0930 20:17:56.796356 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe01d8e6-6f43-47f3-a31f-e8ecbc536785" path="/var/lib/kubelet/pods/fe01d8e6-6f43-47f3-a31f-e8ecbc536785/volumes" Sep 30 20:18:26 crc kubenswrapper[4603]: I0930 20:18:26.051687 4603 generic.go:334] "Generic (PLEG): container finished" podID="af8365b2-113b-4c7b-8781-17cecdd6d3dd" containerID="152813b06acf47582012f8cfede415304bd63ce119668079613c50d290676527" exitCode=0 Sep 30 20:18:26 crc kubenswrapper[4603]: I0930 20:18:26.051773 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" event={"ID":"af8365b2-113b-4c7b-8781-17cecdd6d3dd","Type":"ContainerDied","Data":"152813b06acf47582012f8cfede415304bd63ce119668079613c50d290676527"} Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.543144 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.656731 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key\") pod \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.656816 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory\") pod \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.656952 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbdg7\" (UniqueName: \"kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7\") pod \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\" (UID: \"af8365b2-113b-4c7b-8781-17cecdd6d3dd\") " Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.667481 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7" (OuterVolumeSpecName: "kube-api-access-jbdg7") pod "af8365b2-113b-4c7b-8781-17cecdd6d3dd" (UID: "af8365b2-113b-4c7b-8781-17cecdd6d3dd"). InnerVolumeSpecName "kube-api-access-jbdg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.689366 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory" (OuterVolumeSpecName: "inventory") pod "af8365b2-113b-4c7b-8781-17cecdd6d3dd" (UID: "af8365b2-113b-4c7b-8781-17cecdd6d3dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.689423 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af8365b2-113b-4c7b-8781-17cecdd6d3dd" (UID: "af8365b2-113b-4c7b-8781-17cecdd6d3dd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.758988 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.759036 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af8365b2-113b-4c7b-8781-17cecdd6d3dd-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:27 crc kubenswrapper[4603]: I0930 20:18:27.759049 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbdg7\" (UniqueName: \"kubernetes.io/projected/af8365b2-113b-4c7b-8781-17cecdd6d3dd-kube-api-access-jbdg7\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.072472 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" event={"ID":"af8365b2-113b-4c7b-8781-17cecdd6d3dd","Type":"ContainerDied","Data":"780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42"} Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.072972 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="780af066e0b03e55e489d0f996b3827080feb7584295971299a5181a15322e42" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.072515 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-47wnn" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.163038 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72"] Sep 30 20:18:28 crc kubenswrapper[4603]: E0930 20:18:28.163436 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af8365b2-113b-4c7b-8781-17cecdd6d3dd" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.163451 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="af8365b2-113b-4c7b-8781-17cecdd6d3dd" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.163651 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="af8365b2-113b-4c7b-8781-17cecdd6d3dd" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.165636 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.167359 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.168616 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.168787 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.168943 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.184729 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72"] Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.272723 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.272842 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.273203 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxpdw\" (UniqueName: \"kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.375229 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxpdw\" (UniqueName: \"kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.375386 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.375583 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.388923 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.388940 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.392881 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxpdw\" (UniqueName: \"kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6kz72\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:28 crc kubenswrapper[4603]: I0930 20:18:28.486222 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:29 crc kubenswrapper[4603]: I0930 20:18:29.059810 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72"] Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.070264 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9pth"] Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.077879 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9pth"] Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.092637 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" event={"ID":"2b982ca3-121d-442a-bd28-cf1623afe138","Type":"ContainerStarted","Data":"b1892ef2cd1abaea66b63fd16359d30c150b22e7e85fa050f67fd6bb4d973024"} Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.092692 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" event={"ID":"2b982ca3-121d-442a-bd28-cf1623afe138","Type":"ContainerStarted","Data":"9aaffe88df7fff11e4cfca2678d99c87aebca85f821ac1be423c64300f79bf97"} Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.108103 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" podStartSLOduration=1.973473318 podStartE2EDuration="2.108082242s" podCreationTimestamp="2025-09-30 20:18:28 +0000 UTC" firstStartedPulling="2025-09-30 20:18:29.078474126 +0000 UTC m=+1911.016932954" lastFinishedPulling="2025-09-30 20:18:29.21308306 +0000 UTC m=+1911.151541878" observedRunningTime="2025-09-30 20:18:30.105056688 +0000 UTC m=+1912.043515506" watchObservedRunningTime="2025-09-30 
20:18:30.108082242 +0000 UTC m=+1912.046541060" Sep 30 20:18:30 crc kubenswrapper[4603]: I0930 20:18:30.779565 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2e05975-2977-47a5-9881-2b0996dfc973" path="/var/lib/kubelet/pods/f2e05975-2977-47a5-9881-2b0996dfc973/volumes" Sep 30 20:18:35 crc kubenswrapper[4603]: I0930 20:18:35.156917 4603 generic.go:334] "Generic (PLEG): container finished" podID="2b982ca3-121d-442a-bd28-cf1623afe138" containerID="b1892ef2cd1abaea66b63fd16359d30c150b22e7e85fa050f67fd6bb4d973024" exitCode=0 Sep 30 20:18:35 crc kubenswrapper[4603]: I0930 20:18:35.157008 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" event={"ID":"2b982ca3-121d-442a-bd28-cf1623afe138","Type":"ContainerDied","Data":"b1892ef2cd1abaea66b63fd16359d30c150b22e7e85fa050f67fd6bb4d973024"} Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.627429 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.741305 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxpdw\" (UniqueName: \"kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw\") pod \"2b982ca3-121d-442a-bd28-cf1623afe138\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.741383 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory\") pod \"2b982ca3-121d-442a-bd28-cf1623afe138\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.741444 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key\") pod \"2b982ca3-121d-442a-bd28-cf1623afe138\" (UID: \"2b982ca3-121d-442a-bd28-cf1623afe138\") " Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.747486 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw" (OuterVolumeSpecName: "kube-api-access-kxpdw") pod "2b982ca3-121d-442a-bd28-cf1623afe138" (UID: "2b982ca3-121d-442a-bd28-cf1623afe138"). InnerVolumeSpecName "kube-api-access-kxpdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.774458 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory" (OuterVolumeSpecName: "inventory") pod "2b982ca3-121d-442a-bd28-cf1623afe138" (UID: "2b982ca3-121d-442a-bd28-cf1623afe138"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.797871 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2b982ca3-121d-442a-bd28-cf1623afe138" (UID: "2b982ca3-121d-442a-bd28-cf1623afe138"). InnerVolumeSpecName "ssh-key". 
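The mount side mirrors the teardown pipeline: reconciler_common.go:245 verifies the volume is attached, reconciler_common.go:218 starts the mount, and operation_generator.go:637 reports SetUp succeeded, in that order for each of the three volumes of every job pod here. A sketch pairing the start and success lines to estimate per-volume mount latency, under the same illustrative-parsing assumptions (the pod UID is taken from the (UID: ...) field of each line):

    import re
    from datetime import datetime

    TS = r'I0930 (\d{2}:\d{2}:\d{2}\.\d+)'
    START = re.compile(TS + r' .*MountVolume started for volume \\"([^"\\]+)\\".*\(UID: \\"([0-9a-f-]+)\\"\)')
    DONE = re.compile(TS + r' .*MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\".*\(UID: \\"([0-9a-f-]+)\\"\)')

    def hms(ts):  # klog I-lines carry only the time of day
        return datetime.strptime(ts, "%H:%M:%S.%f")

    started = {}
    with open("kubelet.log") as f:  # hypothetical local copy, one entry per line
        for line in f:
            if (m := START.search(line)):
                ts, volume, uid = m.groups()
                started[(uid, volume)] = hms(ts)
            elif (m := DONE.search(line)):
                ts, volume, uid = m.groups()
                t0 = started.pop((uid, volume), None)
                if t0 is not None:
                    print(uid[:8], volume, (hms(ts) - t0).total_seconds(), "s")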
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.847806 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxpdw\" (UniqueName: \"kubernetes.io/projected/2b982ca3-121d-442a-bd28-cf1623afe138-kube-api-access-kxpdw\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.847848 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:36 crc kubenswrapper[4603]: I0930 20:18:36.847865 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b982ca3-121d-442a-bd28-cf1623afe138-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.180534 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" event={"ID":"2b982ca3-121d-442a-bd28-cf1623afe138","Type":"ContainerDied","Data":"9aaffe88df7fff11e4cfca2678d99c87aebca85f821ac1be423c64300f79bf97"} Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.181249 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9aaffe88df7fff11e4cfca2678d99c87aebca85f821ac1be423c64300f79bf97" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.180613 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6kz72" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.274372 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8"] Sep 30 20:18:37 crc kubenswrapper[4603]: E0930 20:18:37.274926 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b982ca3-121d-442a-bd28-cf1623afe138" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.274952 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b982ca3-121d-442a-bd28-cf1623afe138" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.275264 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b982ca3-121d-442a-bd28-cf1623afe138" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.276245 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.280041 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.281974 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.282933 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.285204 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.302450 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8"] Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.359242 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.359402 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.359540 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r77r\" (UniqueName: \"kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.462042 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r77r\" (UniqueName: \"kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.462194 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.462264 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: 
\"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.467629 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.468220 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.481609 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r77r\" (UniqueName: \"kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zvdt8\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:37 crc kubenswrapper[4603]: I0930 20:18:37.595629 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:18:38 crc kubenswrapper[4603]: I0930 20:18:38.210582 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8"] Sep 30 20:18:39 crc kubenswrapper[4603]: I0930 20:18:39.202387 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" event={"ID":"fea678fb-af98-424b-9231-32d6991910a3","Type":"ContainerStarted","Data":"75ca0d008934f6b4d6dc96476e90de713de1564a86cfa215e7ef4ee1d964bb76"} Sep 30 20:18:39 crc kubenswrapper[4603]: I0930 20:18:39.203369 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" event={"ID":"fea678fb-af98-424b-9231-32d6991910a3","Type":"ContainerStarted","Data":"c0392a14bb878b215eea7e7bd341fac722e756ea522f21789063e8cabc21cc06"} Sep 30 20:18:39 crc kubenswrapper[4603]: I0930 20:18:39.236089 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" podStartSLOduration=2.032728407 podStartE2EDuration="2.236065951s" podCreationTimestamp="2025-09-30 20:18:37 +0000 UTC" firstStartedPulling="2025-09-30 20:18:38.22564181 +0000 UTC m=+1920.164100638" lastFinishedPulling="2025-09-30 20:18:38.428979364 +0000 UTC m=+1920.367438182" observedRunningTime="2025-09-30 20:18:39.22593846 +0000 UTC m=+1921.164397288" watchObservedRunningTime="2025-09-30 20:18:39.236065951 +0000 UTC m=+1921.174524779" Sep 30 20:18:45 crc kubenswrapper[4603]: I0930 20:18:45.714929 4603 scope.go:117] "RemoveContainer" containerID="ff8ed9fb071fc43e165463822d66a4b7aed5d5ab103f1e48d9f7ce6a529ccd28" Sep 30 20:18:45 crc kubenswrapper[4603]: I0930 20:18:45.763319 4603 scope.go:117] "RemoveContainer" containerID="be872dbf9dcea86dd7dc4d41756de80754b59d61704f406c73460bb937bde769" Sep 30 20:18:45 crc kubenswrapper[4603]: I0930 20:18:45.824410 4603 scope.go:117] 
"RemoveContainer" containerID="50ec0e1d60ef266801438be0090b207031dac3cea909ed39f946d928fecdf0f3" Sep 30 20:18:45 crc kubenswrapper[4603]: I0930 20:18:45.860484 4603 scope.go:117] "RemoveContainer" containerID="3b1fe09d26a9e478bf47b6644e1f5446d71d7104e5e50bd861f418d41cf03dde" Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.031475 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7j69x"] Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.043006 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6hwp"] Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.049659 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7j69x"] Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.056458 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6hwp"] Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.782257 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8983ec27-7bbe-4844-a826-9a7ce168e605" path="/var/lib/kubelet/pods/8983ec27-7bbe-4844-a826-9a7ce168e605/volumes" Sep 30 20:18:54 crc kubenswrapper[4603]: I0930 20:18:54.785490 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9336de02-cccd-4f98-a9b1-472833b48fca" path="/var/lib/kubelet/pods/9336de02-cccd-4f98-a9b1-472833b48fca/volumes" Sep 30 20:19:08 crc kubenswrapper[4603]: I0930 20:19:08.442068 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:08 crc kubenswrapper[4603]: I0930 20:19:08.442793 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:19:23 crc kubenswrapper[4603]: I0930 20:19:23.645559 4603 generic.go:334] "Generic (PLEG): container finished" podID="fea678fb-af98-424b-9231-32d6991910a3" containerID="75ca0d008934f6b4d6dc96476e90de713de1564a86cfa215e7ef4ee1d964bb76" exitCode=0 Sep 30 20:19:23 crc kubenswrapper[4603]: I0930 20:19:23.645605 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" event={"ID":"fea678fb-af98-424b-9231-32d6991910a3","Type":"ContainerDied","Data":"75ca0d008934f6b4d6dc96476e90de713de1564a86cfa215e7ef4ee1d964bb76"} Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.144239 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.251115 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key\") pod \"fea678fb-af98-424b-9231-32d6991910a3\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.251539 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory\") pod \"fea678fb-af98-424b-9231-32d6991910a3\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.251704 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r77r\" (UniqueName: \"kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r\") pod \"fea678fb-af98-424b-9231-32d6991910a3\" (UID: \"fea678fb-af98-424b-9231-32d6991910a3\") " Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.258039 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r" (OuterVolumeSpecName: "kube-api-access-9r77r") pod "fea678fb-af98-424b-9231-32d6991910a3" (UID: "fea678fb-af98-424b-9231-32d6991910a3"). InnerVolumeSpecName "kube-api-access-9r77r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.285216 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory" (OuterVolumeSpecName: "inventory") pod "fea678fb-af98-424b-9231-32d6991910a3" (UID: "fea678fb-af98-424b-9231-32d6991910a3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.293426 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fea678fb-af98-424b-9231-32d6991910a3" (UID: "fea678fb-af98-424b-9231-32d6991910a3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.354104 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.354144 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r77r\" (UniqueName: \"kubernetes.io/projected/fea678fb-af98-424b-9231-32d6991910a3-kube-api-access-9r77r\") on node \"crc\" DevicePath \"\"" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.354159 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fea678fb-af98-424b-9231-32d6991910a3-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.666747 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" event={"ID":"fea678fb-af98-424b-9231-32d6991910a3","Type":"ContainerDied","Data":"c0392a14bb878b215eea7e7bd341fac722e756ea522f21789063e8cabc21cc06"} Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.667548 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0392a14bb878b215eea7e7bd341fac722e756ea522f21789063e8cabc21cc06" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.667496 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zvdt8" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.812661 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f"] Sep 30 20:19:25 crc kubenswrapper[4603]: E0930 20:19:25.813088 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fea678fb-af98-424b-9231-32d6991910a3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.813106 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fea678fb-af98-424b-9231-32d6991910a3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.813331 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fea678fb-af98-424b-9231-32d6991910a3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.813972 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.815589 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.816174 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.816288 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.818728 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.821425 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f"] Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.865432 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.865502 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.865604 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz6lx\" (UniqueName: \"kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.966761 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.966869 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.966973 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz6lx\" (UniqueName: \"kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" 
(UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.971950 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.977637 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:25 crc kubenswrapper[4603]: I0930 20:19:25.991007 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz6lx\" (UniqueName: \"kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-97w2f\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:26 crc kubenswrapper[4603]: I0930 20:19:26.135842 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" Sep 30 20:19:26 crc kubenswrapper[4603]: I0930 20:19:26.736690 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f"] Sep 30 20:19:26 crc kubenswrapper[4603]: W0930 20:19:26.740735 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb85ce012_d065_4005_9bbd_7bebe194cb45.slice/crio-5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160 WatchSource:0}: Error finding container 5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160: Status 404 returned error can't find the container with id 5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160 Sep 30 20:19:27 crc kubenswrapper[4603]: I0930 20:19:27.689313 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" event={"ID":"b85ce012-d065-4005-9bbd-7bebe194cb45","Type":"ContainerStarted","Data":"27dcf0e849c46660b6af11f50fc3d92222852fc0ffbd6f1e6eb58e8e93b9b8ec"} Sep 30 20:19:27 crc kubenswrapper[4603]: I0930 20:19:27.689998 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" event={"ID":"b85ce012-d065-4005-9bbd-7bebe194cb45","Type":"ContainerStarted","Data":"5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160"} Sep 30 20:19:27 crc kubenswrapper[4603]: I0930 20:19:27.722790 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" podStartSLOduration=2.546798525 podStartE2EDuration="2.722771409s" podCreationTimestamp="2025-09-30 20:19:25 +0000 UTC" firstStartedPulling="2025-09-30 20:19:26.743856093 +0000 UTC m=+1968.682314921" lastFinishedPulling="2025-09-30 20:19:26.919828987 +0000 UTC m=+1968.858287805" observedRunningTime="2025-09-30 
20:19:27.714723185 +0000 UTC m=+1969.653182023" watchObservedRunningTime="2025-09-30 20:19:27.722771409 +0000 UTC m=+1969.661230247" Sep 30 20:19:37 crc kubenswrapper[4603]: I0930 20:19:37.050154 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rs88k"] Sep 30 20:19:37 crc kubenswrapper[4603]: I0930 20:19:37.061142 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rs88k"] Sep 30 20:19:38 crc kubenswrapper[4603]: I0930 20:19:38.441785 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:38 crc kubenswrapper[4603]: I0930 20:19:38.441856 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:19:38 crc kubenswrapper[4603]: I0930 20:19:38.814447 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8" path="/var/lib/kubelet/pods/177f227a-d8a5-4f3e-8ba5-74a9d6ebf3c8/volumes" Sep 30 20:19:45 crc kubenswrapper[4603]: I0930 20:19:45.995820 4603 scope.go:117] "RemoveContainer" containerID="f34e580619c851a1dd914b19fd9ef2e253f2fc43e51f7273612341ab1b9f05d8" Sep 30 20:19:46 crc kubenswrapper[4603]: I0930 20:19:46.033862 4603 scope.go:117] "RemoveContainer" containerID="17a827c150a76ea6ef396ff0efb6cd39888bde6a4be127c234fafcbe37d1b3f6" Sep 30 20:19:46 crc kubenswrapper[4603]: I0930 20:19:46.087332 4603 scope.go:117] "RemoveContainer" containerID="11968f3e62894d2d51facf2e5f013b5bb7a5ef9b63bfcf80db93b19386ac53b7" Sep 30 20:20:08 crc kubenswrapper[4603]: I0930 20:20:08.441126 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:20:08 crc kubenswrapper[4603]: I0930 20:20:08.441700 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:20:08 crc kubenswrapper[4603]: I0930 20:20:08.441744 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:20:08 crc kubenswrapper[4603]: I0930 20:20:08.442442 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:20:08 crc kubenswrapper[4603]: I0930 20:20:08.442497 4603 kuberuntime_container.go:808] "Killing container with a grace period" 
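Every job container in the excerpt so far has finished with exitCode=0. In the entries below, configure-os's container 27dcf0e8... finishes with exitCode=2, consistent with a failed ansible-playbook run; its pod is torn down like the others, and a replacement job pod (configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx) is ADDed shortly after. A sketch pulling the exit codes out of the generic.go:334 "container finished" lines so such failures stand out, same illustrative-parsing assumptions:

    import re
    from collections import Counter

    FINISHED = re.compile(
        r'"Generic \(PLEG\): container finished" podID="([0-9a-f-]+)" '
        r'containerID="([0-9a-f]+)" exitCode=(\d+)'
    )

    exit_codes = Counter()
    with open("kubelet.log") as f:  # hypothetical local copy, one entry per line
        for line in f:
            if (m := FINISHED.search(line)):
                pod_uid, container_id, code = m.groups()
                exit_codes[int(code)] += 1
                if code != "0":
                    print(f"non-zero exit {code}: pod {pod_uid} container {container_id[:12]}")
    print(dict(exit_codes))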
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e" gracePeriod=600 Sep 30 20:20:09 crc kubenswrapper[4603]: I0930 20:20:09.114474 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e" exitCode=0 Sep 30 20:20:09 crc kubenswrapper[4603]: I0930 20:20:09.114806 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e"} Sep 30 20:20:09 crc kubenswrapper[4603]: I0930 20:20:09.114831 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976"} Sep 30 20:20:09 crc kubenswrapper[4603]: I0930 20:20:09.114846 4603 scope.go:117] "RemoveContainer" containerID="8a8ebd9f48564585074845cc2b5c8af9c99de5d3a805dea66706b8e087328576" Sep 30 20:20:26 crc kubenswrapper[4603]: I0930 20:20:26.272982 4603 generic.go:334] "Generic (PLEG): container finished" podID="b85ce012-d065-4005-9bbd-7bebe194cb45" containerID="27dcf0e849c46660b6af11f50fc3d92222852fc0ffbd6f1e6eb58e8e93b9b8ec" exitCode=2 Sep 30 20:20:26 crc kubenswrapper[4603]: I0930 20:20:26.273115 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" event={"ID":"b85ce012-d065-4005-9bbd-7bebe194cb45","Type":"ContainerDied","Data":"27dcf0e849c46660b6af11f50fc3d92222852fc0ffbd6f1e6eb58e8e93b9b8ec"} Sep 30 20:20:27 crc kubenswrapper[4603]: I0930 20:20:27.902832 4603 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.027517 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key\") pod \"b85ce012-d065-4005-9bbd-7bebe194cb45\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") "
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.027705 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz6lx\" (UniqueName: \"kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx\") pod \"b85ce012-d065-4005-9bbd-7bebe194cb45\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") "
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.027830 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory\") pod \"b85ce012-d065-4005-9bbd-7bebe194cb45\" (UID: \"b85ce012-d065-4005-9bbd-7bebe194cb45\") "
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.037055 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx" (OuterVolumeSpecName: "kube-api-access-pz6lx") pod "b85ce012-d065-4005-9bbd-7bebe194cb45" (UID: "b85ce012-d065-4005-9bbd-7bebe194cb45"). InnerVolumeSpecName "kube-api-access-pz6lx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.061520 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory" (OuterVolumeSpecName: "inventory") pod "b85ce012-d065-4005-9bbd-7bebe194cb45" (UID: "b85ce012-d065-4005-9bbd-7bebe194cb45"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.063158 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b85ce012-d065-4005-9bbd-7bebe194cb45" (UID: "b85ce012-d065-4005-9bbd-7bebe194cb45"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.130989 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.131031 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz6lx\" (UniqueName: \"kubernetes.io/projected/b85ce012-d065-4005-9bbd-7bebe194cb45-kube-api-access-pz6lx\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.131086 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b85ce012-d065-4005-9bbd-7bebe194cb45-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.297531 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f" event={"ID":"b85ce012-d065-4005-9bbd-7bebe194cb45","Type":"ContainerDied","Data":"5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160"}
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.297579 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-97w2f"
Sep 30 20:20:28 crc kubenswrapper[4603]: I0930 20:20:28.297581 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f082440cdcb28e72f8889033c2e7a520af9b73040591bc520545abc1ab7e160"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.034013 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"]
Sep 30 20:20:35 crc kubenswrapper[4603]: E0930 20:20:35.035505 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b85ce012-d065-4005-9bbd-7bebe194cb45" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.035526 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="b85ce012-d065-4005-9bbd-7bebe194cb45" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.035777 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="b85ce012-d065-4005-9bbd-7bebe194cb45" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.036541 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.039207 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.041741 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.055544 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"]
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.055723 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.055830 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.173631 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.173796 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6xrv\" (UniqueName: \"kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.173868 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.275801 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.275913 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6xrv\" (UniqueName: \"kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.275977 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.282123 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.282210 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.308745 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6xrv\" (UniqueName: \"kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.356942 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.896534 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx"]
Sep 30 20:20:35 crc kubenswrapper[4603]: W0930 20:20:35.918358 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod260c57d2_7dcf_404e_83c2_64a074939299.slice/crio-3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8 WatchSource:0}: Error finding container 3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8: Status 404 returned error can't find the container with id 3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8
Sep 30 20:20:35 crc kubenswrapper[4603]: I0930 20:20:35.920987 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.320641 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"]
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.326619 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.338803 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"]
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.406706 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" event={"ID":"260c57d2-7dcf-404e-83c2-64a074939299","Type":"ContainerStarted","Data":"86728e1acb66cd25a35ab310a393a23cfc40622aec9eeeb30311415e041f9eec"}
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.406780 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" event={"ID":"260c57d2-7dcf-404e-83c2-64a074939299","Type":"ContainerStarted","Data":"3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8"}
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.437512 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" podStartSLOduration=1.255697629 podStartE2EDuration="1.437495305s" podCreationTimestamp="2025-09-30 20:20:35 +0000 UTC" firstStartedPulling="2025-09-30 20:20:35.920715593 +0000 UTC m=+2037.859174431" lastFinishedPulling="2025-09-30 20:20:36.102513289 +0000 UTC m=+2038.040972107" observedRunningTime="2025-09-30 20:20:36.433576307 +0000 UTC m=+2038.372035115" watchObservedRunningTime="2025-09-30 20:20:36.437495305 +0000 UTC m=+2038.375954123"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.503002 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlfqx\" (UniqueName: \"kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.503106 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.503458 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.605374 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.605472 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlfqx\" (UniqueName: \"kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.605521 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.605943 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.606011 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.629219 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlfqx\" (UniqueName: \"kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx\") pod \"redhat-operators-jg8qh\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") " pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:36 crc kubenswrapper[4603]: I0930 20:20:36.655767 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:37 crc kubenswrapper[4603]: I0930 20:20:37.122278 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"]
Sep 30 20:20:37 crc kubenswrapper[4603]: I0930 20:20:37.417255 4603 generic.go:334] "Generic (PLEG): container finished" podID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerID="ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7" exitCode=0
Sep 30 20:20:37 crc kubenswrapper[4603]: I0930 20:20:37.418727 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerDied","Data":"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7"}
Sep 30 20:20:37 crc kubenswrapper[4603]: I0930 20:20:37.418754 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerStarted","Data":"a63a9062a90f1916460f51ac77d8a40ef8a3795ea8c2c4a4f16473a394ae0adb"}
Sep 30 20:20:39 crc kubenswrapper[4603]: I0930 20:20:39.439821 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerStarted","Data":"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"}
Sep 30 20:20:42 crc kubenswrapper[4603]: I0930 20:20:42.467037 4603 generic.go:334] "Generic (PLEG): container finished" podID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerID="f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7" exitCode=0
Sep 30 20:20:42 crc kubenswrapper[4603]: I0930 20:20:42.467126 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerDied","Data":"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"}
pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerDied","Data":"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"} Sep 30 20:20:43 crc kubenswrapper[4603]: I0930 20:20:43.480445 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerStarted","Data":"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"} Sep 30 20:20:46 crc kubenswrapper[4603]: I0930 20:20:46.656735 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jg8qh" Sep 30 20:20:46 crc kubenswrapper[4603]: I0930 20:20:46.658323 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jg8qh" Sep 30 20:20:47 crc kubenswrapper[4603]: I0930 20:20:47.705869 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jg8qh" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="registry-server" probeResult="failure" output=< Sep 30 20:20:47 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:20:47 crc kubenswrapper[4603]: > Sep 30 20:20:56 crc kubenswrapper[4603]: I0930 20:20:56.754009 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jg8qh" Sep 30 20:20:56 crc kubenswrapper[4603]: I0930 20:20:56.791917 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jg8qh" podStartSLOduration=15.288488365 podStartE2EDuration="20.791892986s" podCreationTimestamp="2025-09-30 20:20:36 +0000 UTC" firstStartedPulling="2025-09-30 20:20:37.420084863 +0000 UTC m=+2039.358543681" lastFinishedPulling="2025-09-30 20:20:42.923489484 +0000 UTC m=+2044.861948302" observedRunningTime="2025-09-30 20:20:43.500501282 +0000 UTC m=+2045.438960110" watchObservedRunningTime="2025-09-30 20:20:56.791892986 +0000 UTC m=+2058.730351814" Sep 30 20:20:56 crc kubenswrapper[4603]: I0930 20:20:56.835887 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jg8qh" Sep 30 20:20:57 crc kubenswrapper[4603]: I0930 20:20:57.007501 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"] Sep 30 20:20:58 crc kubenswrapper[4603]: I0930 20:20:58.615775 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jg8qh" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="registry-server" containerID="cri-o://125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999" gracePeriod=2 Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.024660 4603 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.135958 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities\") pod \"dc7bb6de-b045-49d8-af69-4097547cd91b\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") "
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.136069 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlfqx\" (UniqueName: \"kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx\") pod \"dc7bb6de-b045-49d8-af69-4097547cd91b\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") "
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.136150 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content\") pod \"dc7bb6de-b045-49d8-af69-4097547cd91b\" (UID: \"dc7bb6de-b045-49d8-af69-4097547cd91b\") "
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.137222 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities" (OuterVolumeSpecName: "utilities") pod "dc7bb6de-b045-49d8-af69-4097547cd91b" (UID: "dc7bb6de-b045-49d8-af69-4097547cd91b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.158374 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx" (OuterVolumeSpecName: "kube-api-access-zlfqx") pod "dc7bb6de-b045-49d8-af69-4097547cd91b" (UID: "dc7bb6de-b045-49d8-af69-4097547cd91b"). InnerVolumeSpecName "kube-api-access-zlfqx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.226008 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc7bb6de-b045-49d8-af69-4097547cd91b" (UID: "dc7bb6de-b045-49d8-af69-4097547cd91b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.238738 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.238792 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlfqx\" (UniqueName: \"kubernetes.io/projected/dc7bb6de-b045-49d8-af69-4097547cd91b-kube-api-access-zlfqx\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.238807 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc7bb6de-b045-49d8-af69-4097547cd91b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.628081 4603 generic.go:334] "Generic (PLEG): container finished" podID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerID="125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999" exitCode=0
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.628139 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerDied","Data":"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"}
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.628497 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jg8qh" event={"ID":"dc7bb6de-b045-49d8-af69-4097547cd91b","Type":"ContainerDied","Data":"a63a9062a90f1916460f51ac77d8a40ef8a3795ea8c2c4a4f16473a394ae0adb"}
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.628523 4603 scope.go:117] "RemoveContainer" containerID="125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.628223 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jg8qh"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.676782 4603 scope.go:117] "RemoveContainer" containerID="f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.692023 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"]
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.705131 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jg8qh"]
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.720047 4603 scope.go:117] "RemoveContainer" containerID="ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.768124 4603 scope.go:117] "RemoveContainer" containerID="125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"
Sep 30 20:20:59 crc kubenswrapper[4603]: E0930 20:20:59.768862 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999\": container with ID starting with 125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999 not found: ID does not exist" containerID="125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.768965 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999"} err="failed to get container status \"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999\": rpc error: code = NotFound desc = could not find container \"125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999\": container with ID starting with 125d4ed2c917c3eb04b496d0dfda42705447efd30a28ec3ed6118e2660bcf999 not found: ID does not exist"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.769007 4603 scope.go:117] "RemoveContainer" containerID="f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"
Sep 30 20:20:59 crc kubenswrapper[4603]: E0930 20:20:59.769593 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7\": container with ID starting with f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7 not found: ID does not exist" containerID="f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.769628 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7"} err="failed to get container status \"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7\": rpc error: code = NotFound desc = could not find container \"f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7\": container with ID starting with f91b2a2b6ceba4412819f8eb44dfd562eb93fb2580f4666a6105aaba9c0aa3a7 not found: ID does not exist"
Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.769649 4603 scope.go:117] "RemoveContainer" containerID="ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7"
Sep 30 20:20:59 crc kubenswrapper[4603]: E0930 20:20:59.770595 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7\": container with ID starting with ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7 not found: ID does not exist" containerID="ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7"
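The RemoveContainer retries above end in NotFound errors because the containers are already gone, and the kubelet logs the error but proceeds: for a delete, not-found is the desired end state. A sketch of that idempotent cleanup; the store map and the truncated IDs are illustrative:

package main

import (
	"errors"
	"fmt"
)

// errNotFound plays the role of the runtime's
// "rpc error: code = NotFound desc = could not find container ..." reply.
var errNotFound = errors.New("rpc error: code = NotFound desc = could not find container")

// removeContainer deletes a container record; the store map stands in for
// the container runtime's state.
func removeContainer(store map[string]bool, id string) error {
	if !store[id] {
		return errNotFound
	}
	delete(store, id)
	return nil
}

func main() {
	store := map[string]bool{"125d4ed2c917": true}
	for _, id := range []string{"125d4ed2c917", "125d4ed2c917"} { // the second call races with the first
		err := removeContainer(store, id)
		if errors.Is(err, errNotFound) {
			// The container is already gone, which is the desired end state:
			// log the error, as the kubelet does, but do not fail the cleanup.
			fmt.Printf("DeleteContainer returned error for %s: %v (treated as success)\n", id, err)
			continue
		}
		if err != nil {
			fmt.Println("cleanup failed:", err)
			return
		}
		fmt.Printf("removed %s\n", id)
	}
}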
err="rpc error: code = NotFound desc = could not find container \"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7\": container with ID starting with ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7 not found: ID does not exist" containerID="ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7" Sep 30 20:20:59 crc kubenswrapper[4603]: I0930 20:20:59.770667 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7"} err="failed to get container status \"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7\": rpc error: code = NotFound desc = could not find container \"ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7\": container with ID starting with ac64c9bd3952fea476cb646ab9004ffb9409e543eb395b9eacea87d1f406c4a7 not found: ID does not exist" Sep 30 20:21:00 crc kubenswrapper[4603]: I0930 20:21:00.783237 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" path="/var/lib/kubelet/pods/dc7bb6de-b045-49d8-af69-4097547cd91b/volumes" Sep 30 20:21:30 crc kubenswrapper[4603]: I0930 20:21:30.971544 4603 generic.go:334] "Generic (PLEG): container finished" podID="260c57d2-7dcf-404e-83c2-64a074939299" containerID="86728e1acb66cd25a35ab310a393a23cfc40622aec9eeeb30311415e041f9eec" exitCode=0 Sep 30 20:21:30 crc kubenswrapper[4603]: I0930 20:21:30.971613 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" event={"ID":"260c57d2-7dcf-404e-83c2-64a074939299","Type":"ContainerDied","Data":"86728e1acb66cd25a35ab310a393a23cfc40622aec9eeeb30311415e041f9eec"} Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.470801 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.567359 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6xrv\" (UniqueName: \"kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv\") pod \"260c57d2-7dcf-404e-83c2-64a074939299\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.567593 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory\") pod \"260c57d2-7dcf-404e-83c2-64a074939299\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.567705 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key\") pod \"260c57d2-7dcf-404e-83c2-64a074939299\" (UID: \"260c57d2-7dcf-404e-83c2-64a074939299\") " Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.579394 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv" (OuterVolumeSpecName: "kube-api-access-w6xrv") pod "260c57d2-7dcf-404e-83c2-64a074939299" (UID: "260c57d2-7dcf-404e-83c2-64a074939299"). InnerVolumeSpecName "kube-api-access-w6xrv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.602512 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory" (OuterVolumeSpecName: "inventory") pod "260c57d2-7dcf-404e-83c2-64a074939299" (UID: "260c57d2-7dcf-404e-83c2-64a074939299"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.605424 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "260c57d2-7dcf-404e-83c2-64a074939299" (UID: "260c57d2-7dcf-404e-83c2-64a074939299"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.670698 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6xrv\" (UniqueName: \"kubernetes.io/projected/260c57d2-7dcf-404e-83c2-64a074939299-kube-api-access-w6xrv\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.670748 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.670767 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/260c57d2-7dcf-404e-83c2-64a074939299-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.994664 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" event={"ID":"260c57d2-7dcf-404e-83c2-64a074939299","Type":"ContainerDied","Data":"3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8"} Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.994703 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3814fb0829ff1a1c7353c763f792b1a01e29bd95de82f20834677c96d7ebc6a8" Sep 30 20:21:32 crc kubenswrapper[4603]: I0930 20:21:32.994731 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.104534 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4tcb9"] Sep 30 20:21:33 crc kubenswrapper[4603]: E0930 20:21:33.104912 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="260c57d2-7dcf-404e-83c2-64a074939299" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.104929 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="260c57d2-7dcf-404e-83c2-64a074939299" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:21:33 crc kubenswrapper[4603]: E0930 20:21:33.104947 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="registry-server" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.104965 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="registry-server" Sep 30 20:21:33 crc kubenswrapper[4603]: E0930 20:21:33.104975 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="extract-content" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.104981 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="extract-content" Sep 30 20:21:33 crc kubenswrapper[4603]: E0930 20:21:33.104998 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="extract-utilities" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.105004 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="extract-utilities" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.105195 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7bb6de-b045-49d8-af69-4097547cd91b" containerName="registry-server" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.105216 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="260c57d2-7dcf-404e-83c2-64a074939299" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.105771 4603 util.go:30] "No sandbox for pod can be found. 
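RemoveStaleState runs when a new pod is admitted: the CPU and memory managers sweep their saved assignments and drop entries belonging to containers that no longer exist, here the finished EDPM job and the deleted catalog pod. A sketch of that sweep over a state map keyed by podUID and container name; the cpuset values are invented placeholders:

package main

import "fmt"

// key identifies a resource-manager state entry, mirroring the
// podUID/containerName pairs in the RemoveStaleState records above.
type key struct{ podUID, container string }

func main() {
	// Stale CPU assignments left behind by pods that have since been deleted.
	state := map[key]string{
		{"260c57d2-7dcf-404e-83c2-64a074939299", "configure-os-edpm-deployment-openstack-edpm-ipam"}: "cpuset 0-3",
		{"dc7bb6de-b045-49d8-af69-4097547cd91b", "registry-server"}:                                   "cpuset 4-5",
	}
	// The set of containers still active; empty here because both pods are gone.
	active := map[key]bool{}

	// On pod admission the managers sweep their state and drop entries
	// for containers that are not in the active set.
	for k := range state {
		if !active[k] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(state, k) // deleting while ranging over a map is safe in Go
		}
	}
	fmt.Printf("%d stale entries remain\n", len(state))
}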
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.107264 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.108760 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.109190 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.109229 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.118792 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4tcb9"]
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.281772 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.282076 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96lkt\" (UniqueName: \"kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.282114 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.384194 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.384258 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96lkt\" (UniqueName: \"kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.384292 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.397733 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.401874 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96lkt\" (UniqueName: \"kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.407151 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4tcb9\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") " pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:33 crc kubenswrapper[4603]: I0930 20:21:33.424155 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:34 crc kubenswrapper[4603]: I0930 20:21:34.000084 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4tcb9"]
Sep 30 20:21:34 crc kubenswrapper[4603]: W0930 20:21:34.001597 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8cf9e0f_89a8_4107_b6eb_3adc4978c983.slice/crio-3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d WatchSource:0}: Error finding container 3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d: Status 404 returned error can't find the container with id 3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d
Sep 30 20:21:35 crc kubenswrapper[4603]: I0930 20:21:35.017704 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9" event={"ID":"e8cf9e0f-89a8-4107-b6eb-3adc4978c983","Type":"ContainerStarted","Data":"3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d"}
Sep 30 20:21:36 crc kubenswrapper[4603]: I0930 20:21:36.025961 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9" event={"ID":"e8cf9e0f-89a8-4107-b6eb-3adc4978c983","Type":"ContainerStarted","Data":"79578ba561492d1af0d6b03787d1313642ea7591592d6fba2b2bd0795f7d2725"}
Sep 30 20:21:45 crc kubenswrapper[4603]: I0930 20:21:45.113468 4603 generic.go:334] "Generic (PLEG): container finished" podID="e8cf9e0f-89a8-4107-b6eb-3adc4978c983" containerID="79578ba561492d1af0d6b03787d1313642ea7591592d6fba2b2bd0795f7d2725" exitCode=0
Sep 30 20:21:45 crc kubenswrapper[4603]: I0930 20:21:45.113553 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9" event={"ID":"e8cf9e0f-89a8-4107-b6eb-3adc4978c983","Type":"ContainerDied","Data":"79578ba561492d1af0d6b03787d1313642ea7591592d6fba2b2bd0795f7d2725"}
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.564868 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9"
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.656767 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96lkt\" (UniqueName: \"kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt\") pod \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") "
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.656910 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam\") pod \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") "
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.657001 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0\") pod \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\" (UID: \"e8cf9e0f-89a8-4107-b6eb-3adc4978c983\") "
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.662735 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt" (OuterVolumeSpecName: "kube-api-access-96lkt") pod "e8cf9e0f-89a8-4107-b6eb-3adc4978c983" (UID: "e8cf9e0f-89a8-4107-b6eb-3adc4978c983"). InnerVolumeSpecName "kube-api-access-96lkt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.691297 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "e8cf9e0f-89a8-4107-b6eb-3adc4978c983" (UID: "e8cf9e0f-89a8-4107-b6eb-3adc4978c983"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.691353 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e8cf9e0f-89a8-4107-b6eb-3adc4978c983" (UID: "e8cf9e0f-89a8-4107-b6eb-3adc4978c983"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
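By this point the log has repeated the same job lifecycle several times, so container run times can be read straight off the timestamps: the ssh-known-hosts container started at 20:21:36 and finished at 20:21:45, about nine seconds. A hypothetical helper (not part of the kubelet, and the two sample lines are abridged copies of the records above) for extracting such durations:

package main

import (
	"fmt"
	"regexp"
	"time"
)

// tsRe pulls the HH:MM:SS field out of a journal-style record prefix.
var tsRe = regexp.MustCompile(`^Sep 30 (\d{2}:\d{2}:\d{2}) `)

func stamp(line string) (time.Time, bool) {
	m := tsRe.FindStringSubmatch(line)
	if m == nil {
		return time.Time{}, false
	}
	t, err := time.Parse("15:04:05", m[1])
	return t, err == nil
}

func main() {
	started := `Sep 30 20:21:36 crc kubenswrapper[4603]: ... "ContainerStarted" ...`
	finished := `Sep 30 20:21:45 crc kubenswrapper[4603]: ... "container finished" exitCode=0 ...`
	t0, ok0 := stamp(started)
	t1, ok1 := stamp(finished)
	if ok0 && ok1 {
		fmt.Printf("job container ran for %s\n", t1.Sub(t0)) // 9s for this job
	}
}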
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.759674 4603 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.759712 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96lkt\" (UniqueName: \"kubernetes.io/projected/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-kube-api-access-96lkt\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:46 crc kubenswrapper[4603]: I0930 20:21:46.759729 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e8cf9e0f-89a8-4107-b6eb-3adc4978c983-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.133137 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9" event={"ID":"e8cf9e0f-89a8-4107-b6eb-3adc4978c983","Type":"ContainerDied","Data":"3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d"} Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.133223 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ad57a1edc357d2221010ecf180e581718229c982f56e64189954e4bb4c1bb4d" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.133311 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4tcb9" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.244205 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z"] Sep 30 20:21:47 crc kubenswrapper[4603]: E0930 20:21:47.244786 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8cf9e0f-89a8-4107-b6eb-3adc4978c983" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.244817 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8cf9e0f-89a8-4107-b6eb-3adc4978c983" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.245141 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8cf9e0f-89a8-4107-b6eb-3adc4978c983" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.246204 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.249871 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.250256 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.251739 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.252005 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.254852 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z"] Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.385674 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.385859 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d4sv\" (UniqueName: \"kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.386108 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.488385 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.488470 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.488636 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d4sv\" (UniqueName: \"kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.492828 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.499509 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.538141 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d4sv\" (UniqueName: \"kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wmv5z\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:47 crc kubenswrapper[4603]: I0930 20:21:47.562357 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:21:48 crc kubenswrapper[4603]: I0930 20:21:48.138529 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z"] Sep 30 20:21:49 crc kubenswrapper[4603]: I0930 20:21:49.164926 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" event={"ID":"aff20c40-8319-4474-970b-9e7d3a672838","Type":"ContainerStarted","Data":"64522bc93fdbb0296f8cfab19ecb708040a017839e8468c91fbd9b1a82774762"} Sep 30 20:21:50 crc kubenswrapper[4603]: I0930 20:21:50.175808 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" event={"ID":"aff20c40-8319-4474-970b-9e7d3a672838","Type":"ContainerStarted","Data":"1953652bd2b9d1c2d6698e02afba2a6935df57d3b832f8e7a126d1d568b59a57"} Sep 30 20:21:50 crc kubenswrapper[4603]: I0930 20:21:50.190975 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" podStartSLOduration=2.43951182 podStartE2EDuration="3.190953529s" podCreationTimestamp="2025-09-30 20:21:47 +0000 UTC" firstStartedPulling="2025-09-30 20:21:48.149921873 +0000 UTC m=+2110.088380691" lastFinishedPulling="2025-09-30 20:21:48.901363582 +0000 UTC m=+2110.839822400" observedRunningTime="2025-09-30 20:21:50.189761605 +0000 UTC m=+2112.128220503" watchObservedRunningTime="2025-09-30 20:21:50.190953529 +0000 UTC m=+2112.129412357" Sep 30 20:21:59 crc kubenswrapper[4603]: I0930 20:21:59.279597 4603 generic.go:334] "Generic (PLEG): container finished" podID="aff20c40-8319-4474-970b-9e7d3a672838" containerID="1953652bd2b9d1c2d6698e02afba2a6935df57d3b832f8e7a126d1d568b59a57" exitCode=0 Sep 30 20:21:59 crc kubenswrapper[4603]: I0930 20:21:59.279673 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" 
event={"ID":"aff20c40-8319-4474-970b-9e7d3a672838","Type":"ContainerDied","Data":"1953652bd2b9d1c2d6698e02afba2a6935df57d3b832f8e7a126d1d568b59a57"} Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.804399 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.845797 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory\") pod \"aff20c40-8319-4474-970b-9e7d3a672838\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.845987 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key\") pod \"aff20c40-8319-4474-970b-9e7d3a672838\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.846106 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4sv\" (UniqueName: \"kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv\") pod \"aff20c40-8319-4474-970b-9e7d3a672838\" (UID: \"aff20c40-8319-4474-970b-9e7d3a672838\") " Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.863856 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv" (OuterVolumeSpecName: "kube-api-access-2d4sv") pod "aff20c40-8319-4474-970b-9e7d3a672838" (UID: "aff20c40-8319-4474-970b-9e7d3a672838"). InnerVolumeSpecName "kube-api-access-2d4sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.873394 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aff20c40-8319-4474-970b-9e7d3a672838" (UID: "aff20c40-8319-4474-970b-9e7d3a672838"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.876305 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory" (OuterVolumeSpecName: "inventory") pod "aff20c40-8319-4474-970b-9e7d3a672838" (UID: "aff20c40-8319-4474-970b-9e7d3a672838"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.948581 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.948765 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4sv\" (UniqueName: \"kubernetes.io/projected/aff20c40-8319-4474-970b-9e7d3a672838-kube-api-access-2d4sv\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:00 crc kubenswrapper[4603]: I0930 20:22:00.948852 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aff20c40-8319-4474-970b-9e7d3a672838-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.297237 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" event={"ID":"aff20c40-8319-4474-970b-9e7d3a672838","Type":"ContainerDied","Data":"64522bc93fdbb0296f8cfab19ecb708040a017839e8468c91fbd9b1a82774762"} Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.297298 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64522bc93fdbb0296f8cfab19ecb708040a017839e8468c91fbd9b1a82774762" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.297315 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wmv5z" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.411291 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7"] Sep 30 20:22:01 crc kubenswrapper[4603]: E0930 20:22:01.412250 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aff20c40-8319-4474-970b-9e7d3a672838" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.412296 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="aff20c40-8319-4474-970b-9e7d3a672838" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.412645 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="aff20c40-8319-4474-970b-9e7d3a672838" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.413735 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.416036 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.416419 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.421090 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.421448 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.422830 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7"] Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.458224 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.458363 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvvlc\" (UniqueName: \"kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.458403 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.560119 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.560502 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvvlc\" (UniqueName: \"kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.560601 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: 
\"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.565666 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.576548 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvvlc\" (UniqueName: \"kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.577663 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:01 crc kubenswrapper[4603]: I0930 20:22:01.738336 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:02 crc kubenswrapper[4603]: I0930 20:22:02.307766 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7"] Sep 30 20:22:03 crc kubenswrapper[4603]: I0930 20:22:03.318795 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" event={"ID":"7b095899-7ded-4255-b88c-078c4e4f4d51","Type":"ContainerStarted","Data":"91d7ba89b8b1df7d7725c0844c3dd19321404a0bfcd0e867cb2bea04860c4ccc"} Sep 30 20:22:03 crc kubenswrapper[4603]: I0930 20:22:03.319706 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" event={"ID":"7b095899-7ded-4255-b88c-078c4e4f4d51","Type":"ContainerStarted","Data":"44ae70ad8d674540263a76c7ec88efaa2c10f7f1827d3b5b0dca39bafcdc8d72"} Sep 30 20:22:03 crc kubenswrapper[4603]: I0930 20:22:03.344513 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" podStartSLOduration=2.088126445 podStartE2EDuration="2.344495595s" podCreationTimestamp="2025-09-30 20:22:01 +0000 UTC" firstStartedPulling="2025-09-30 20:22:02.312641917 +0000 UTC m=+2124.251100735" lastFinishedPulling="2025-09-30 20:22:02.569011067 +0000 UTC m=+2124.507469885" observedRunningTime="2025-09-30 20:22:03.334672642 +0000 UTC m=+2125.273131490" watchObservedRunningTime="2025-09-30 20:22:03.344495595 +0000 UTC m=+2125.282954413" Sep 30 20:22:08 crc kubenswrapper[4603]: I0930 20:22:08.441310 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:22:08 crc kubenswrapper[4603]: I0930 20:22:08.441858 4603 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:22:13 crc kubenswrapper[4603]: I0930 20:22:13.420690 4603 generic.go:334] "Generic (PLEG): container finished" podID="7b095899-7ded-4255-b88c-078c4e4f4d51" containerID="91d7ba89b8b1df7d7725c0844c3dd19321404a0bfcd0e867cb2bea04860c4ccc" exitCode=0 Sep 30 20:22:13 crc kubenswrapper[4603]: I0930 20:22:13.420812 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" event={"ID":"7b095899-7ded-4255-b88c-078c4e4f4d51","Type":"ContainerDied","Data":"91d7ba89b8b1df7d7725c0844c3dd19321404a0bfcd0e867cb2bea04860c4ccc"} Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.882290 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.928727 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory\") pod \"7b095899-7ded-4255-b88c-078c4e4f4d51\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.928986 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvvlc\" (UniqueName: \"kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc\") pod \"7b095899-7ded-4255-b88c-078c4e4f4d51\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.929205 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key\") pod \"7b095899-7ded-4255-b88c-078c4e4f4d51\" (UID: \"7b095899-7ded-4255-b88c-078c4e4f4d51\") " Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.940038 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc" (OuterVolumeSpecName: "kube-api-access-fvvlc") pod "7b095899-7ded-4255-b88c-078c4e4f4d51" (UID: "7b095899-7ded-4255-b88c-078c4e4f4d51"). InnerVolumeSpecName "kube-api-access-fvvlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.955503 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory" (OuterVolumeSpecName: "inventory") pod "7b095899-7ded-4255-b88c-078c4e4f4d51" (UID: "7b095899-7ded-4255-b88c-078c4e4f4d51"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:22:14 crc kubenswrapper[4603]: I0930 20:22:14.957818 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7b095899-7ded-4255-b88c-078c4e4f4d51" (UID: "7b095899-7ded-4255-b88c-078c4e4f4d51"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.031990 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.032291 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvvlc\" (UniqueName: \"kubernetes.io/projected/7b095899-7ded-4255-b88c-078c4e4f4d51-kube-api-access-fvvlc\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.032394 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b095899-7ded-4255-b88c-078c4e4f4d51-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.442333 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" event={"ID":"7b095899-7ded-4255-b88c-078c4e4f4d51","Type":"ContainerDied","Data":"44ae70ad8d674540263a76c7ec88efaa2c10f7f1827d3b5b0dca39bafcdc8d72"} Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.442368 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.442382 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44ae70ad8d674540263a76c7ec88efaa2c10f7f1827d3b5b0dca39bafcdc8d72" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.563753 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq"] Sep 30 20:22:15 crc kubenswrapper[4603]: E0930 20:22:15.564207 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b095899-7ded-4255-b88c-078c4e4f4d51" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.564229 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b095899-7ded-4255-b88c-078c4e4f4d51" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.564479 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b095899-7ded-4255-b88c-078c4e4f4d51" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.565235 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.568591 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.568776 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.569092 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.571652 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.571954 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.572276 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.572491 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.572651 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.624811 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq"] Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.646354 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.646652 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.646734 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.646847 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.646960 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647059 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647192 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647293 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647414 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647548 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxw6x\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647736 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: 
\"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647832 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.647913 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.648022 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.749820 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750143 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750348 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750506 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750619 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750736 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.750888 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.751395 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.751524 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.751638 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.751770 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.751911 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxw6x\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: 
\"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.752073 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.753390 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.755788 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.756251 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.756312 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.757161 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.759273 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.759806 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.760231 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.760547 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.761613 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.764950 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.767398 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.767482 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.769396 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.773333 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxw6x\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hndqq\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:15 crc kubenswrapper[4603]: I0930 20:22:15.886490 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:22:16 crc kubenswrapper[4603]: I0930 20:22:16.426089 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq"] Sep 30 20:22:16 crc kubenswrapper[4603]: I0930 20:22:16.455740 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" event={"ID":"f4f9e63e-bace-4185-a45a-cbc16d4be310","Type":"ContainerStarted","Data":"5b5d930cb6c976270f8d51b7e9adff2b4cb91be38ea08ef0270002cb8c46dfeb"} Sep 30 20:22:17 crc kubenswrapper[4603]: I0930 20:22:17.467084 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" event={"ID":"f4f9e63e-bace-4185-a45a-cbc16d4be310","Type":"ContainerStarted","Data":"cf5630fea912392c123f53e98a363c5b1400660a0a85918b0a22b61c59d705ea"} Sep 30 20:22:17 crc kubenswrapper[4603]: I0930 20:22:17.490020 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" podStartSLOduration=2.256135778 podStartE2EDuration="2.489996252s" podCreationTimestamp="2025-09-30 20:22:15 +0000 UTC" firstStartedPulling="2025-09-30 20:22:16.437396777 +0000 UTC m=+2138.375855595" lastFinishedPulling="2025-09-30 20:22:16.671257261 +0000 UTC m=+2138.609716069" observedRunningTime="2025-09-30 20:22:17.485103126 +0000 UTC m=+2139.423561944" watchObservedRunningTime="2025-09-30 20:22:17.489996252 +0000 UTC m=+2139.428455080" Sep 30 20:22:38 crc kubenswrapper[4603]: I0930 20:22:38.441700 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:22:38 crc kubenswrapper[4603]: I0930 20:22:38.442479 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.570282 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.573348 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.581857 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.683250 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.683448 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.683585 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9lt2\" (UniqueName: \"kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.797213 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.797333 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.797370 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9lt2\" (UniqueName: \"kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.799542 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.800551 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.822094 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-q9lt2\" (UniqueName: \"kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2\") pod \"redhat-marketplace-ndgh6\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:52 crc kubenswrapper[4603]: I0930 20:22:52.892293 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:22:53 crc kubenswrapper[4603]: I0930 20:22:53.375961 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:22:53 crc kubenswrapper[4603]: I0930 20:22:53.835247 4603 generic.go:334] "Generic (PLEG): container finished" podID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerID="07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c" exitCode=0 Sep 30 20:22:53 crc kubenswrapper[4603]: I0930 20:22:53.835572 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerDied","Data":"07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c"} Sep 30 20:22:53 crc kubenswrapper[4603]: I0930 20:22:53.835613 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerStarted","Data":"43f25e9185885faaba1ffffdc2511128a1c6aef568abf65ddb4b6b5cc1b19e01"} Sep 30 20:22:55 crc kubenswrapper[4603]: I0930 20:22:55.853493 4603 generic.go:334] "Generic (PLEG): container finished" podID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerID="5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b" exitCode=0 Sep 30 20:22:55 crc kubenswrapper[4603]: I0930 20:22:55.854061 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerDied","Data":"5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b"} Sep 30 20:22:56 crc kubenswrapper[4603]: I0930 20:22:56.869987 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerStarted","Data":"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d"} Sep 30 20:22:56 crc kubenswrapper[4603]: I0930 20:22:56.904134 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ndgh6" podStartSLOduration=2.428412737 podStartE2EDuration="4.904114022s" podCreationTimestamp="2025-09-30 20:22:52 +0000 UTC" firstStartedPulling="2025-09-30 20:22:53.83775853 +0000 UTC m=+2175.776217378" lastFinishedPulling="2025-09-30 20:22:56.313459845 +0000 UTC m=+2178.251918663" observedRunningTime="2025-09-30 20:22:56.894436654 +0000 UTC m=+2178.832895472" watchObservedRunningTime="2025-09-30 20:22:56.904114022 +0000 UTC m=+2178.842572840" Sep 30 20:23:01 crc kubenswrapper[4603]: I0930 20:23:01.916646 4603 generic.go:334] "Generic (PLEG): container finished" podID="f4f9e63e-bace-4185-a45a-cbc16d4be310" containerID="cf5630fea912392c123f53e98a363c5b1400660a0a85918b0a22b61c59d705ea" exitCode=0 Sep 30 20:23:01 crc kubenswrapper[4603]: I0930 20:23:01.917151 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" 
event={"ID":"f4f9e63e-bace-4185-a45a-cbc16d4be310","Type":"ContainerDied","Data":"cf5630fea912392c123f53e98a363c5b1400660a0a85918b0a22b61c59d705ea"} Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.610282 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.615301 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.627958 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.703818 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfm2x\" (UniqueName: \"kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.703901 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-utilities\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.704079 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.805905 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.806094 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfm2x\" (UniqueName: \"kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.806306 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-utilities\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.806474 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:02 crc kubenswrapper[4603]: 
Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.834280 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfm2x\" (UniqueName: \"kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x\") pod \"community-operators-v8srs\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " pod="openshift-marketplace/community-operators-v8srs"
Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.892581 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ndgh6"
Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.892869 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ndgh6"
Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.939899 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8srs"
Sep 30 20:23:02 crc kubenswrapper[4603]: I0930 20:23:02.976876 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ndgh6"
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.541231 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq"
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.628817 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") "
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.628896 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") "
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.628931 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") "
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.629008 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxw6x\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") "
Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.629087 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") "
pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.629193 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.629239 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630374 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630419 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630499 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630542 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630586 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630609 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.630634 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key\") pod \"f4f9e63e-bace-4185-a45a-cbc16d4be310\" (UID: \"f4f9e63e-bace-4185-a45a-cbc16d4be310\") " Sep 30 20:23:03 crc kubenswrapper[4603]: 
I0930 20:23:03.635563 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.635559 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.637685 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x" (OuterVolumeSpecName: "kube-api-access-cxw6x") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "kube-api-access-cxw6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.638301 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.640216 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.640783 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.640999 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.641086 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). 
InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.643260 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.643784 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.644192 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.646961 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.648733 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.672314 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory" (OuterVolumeSpecName: "inventory") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.686591 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f4f9e63e-bace-4185-a45a-cbc16d4be310" (UID: "f4f9e63e-bace-4185-a45a-cbc16d4be310"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.731762 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732282 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732351 4603 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732413 4603 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732503 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732572 4603 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732630 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732695 4603 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732753 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxw6x\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-kube-api-access-cxw6x\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732806 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4f9e63e-bace-4185-a45a-cbc16d4be310-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732865 4603 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732920 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.732998 4603 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.733079 4603 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4f9e63e-bace-4185-a45a-cbc16d4be310-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.939549 4603 generic.go:334] "Generic (PLEG): container finished" podID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerID="6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f" exitCode=0 Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.940294 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerDied","Data":"6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f"} Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.940323 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerStarted","Data":"55764a44a9615e14039b00e10c7ec26cd5c6d079b459272c02177a94933a7d0a"} Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.944816 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.944868 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hndqq" event={"ID":"f4f9e63e-bace-4185-a45a-cbc16d4be310","Type":"ContainerDied","Data":"5b5d930cb6c976270f8d51b7e9adff2b4cb91be38ea08ef0270002cb8c46dfeb"} Sep 30 20:23:03 crc kubenswrapper[4603]: I0930 20:23:03.944941 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b5d930cb6c976270f8d51b7e9adff2b4cb91be38ea08ef0270002cb8c46dfeb" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.007477 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.065523 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt"] Sep 30 20:23:04 crc kubenswrapper[4603]: E0930 20:23:04.066130 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f9e63e-bace-4185-a45a-cbc16d4be310" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.066227 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f9e63e-bace-4185-a45a-cbc16d4be310" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.066455 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f9e63e-bace-4185-a45a-cbc16d4be310" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.067154 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.071813 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.072035 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.072230 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.072355 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.072518 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.076366 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt"] Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.142566 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.142871 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.142901 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.142943 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.142986 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg2g9\" (UniqueName: \"kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.244256 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.244328 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.244394 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.244450 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg2g9\" (UniqueName: \"kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.244534 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.245388 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.249657 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.250254 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.250767 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.261589 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg2g9\" (UniqueName: \"kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-gdhjt\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.393346 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:23:04 crc kubenswrapper[4603]: W0930 20:23:04.955637 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cd14dc_05cd_4a02_adde_bd6cc7b55643.slice/crio-f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310 WatchSource:0}: Error finding container f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310: Status 404 returned error can't find the container with id f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310 Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.963637 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerStarted","Data":"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c"} Sep 30 20:23:04 crc kubenswrapper[4603]: I0930 20:23:04.964241 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt"] Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.374664 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.986879 4603 generic.go:334] "Generic (PLEG): container finished" podID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerID="a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c" exitCode=0 Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.987957 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerDied","Data":"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c"} Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.991133 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ndgh6" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="registry-server" containerID="cri-o://8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d" gracePeriod=2 Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.992247 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" event={"ID":"09cd14dc-05cd-4a02-adde-bd6cc7b55643","Type":"ContainerStarted","Data":"11127943c242731cf6aebf0948735aac09bac019b293b71ba31b74e25a4be904"} Sep 30 20:23:05 crc kubenswrapper[4603]: I0930 20:23:05.992296 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" event={"ID":"09cd14dc-05cd-4a02-adde-bd6cc7b55643","Type":"ContainerStarted","Data":"f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310"} Sep 30 20:23:06 crc 
kubenswrapper[4603]: I0930 20:23:06.055339 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" podStartSLOduration=1.876118944 podStartE2EDuration="2.055303767s" podCreationTimestamp="2025-09-30 20:23:04 +0000 UTC" firstStartedPulling="2025-09-30 20:23:04.964253352 +0000 UTC m=+2186.902712170" lastFinishedPulling="2025-09-30 20:23:05.143438175 +0000 UTC m=+2187.081896993" observedRunningTime="2025-09-30 20:23:06.038993833 +0000 UTC m=+2187.977452661" watchObservedRunningTime="2025-09-30 20:23:06.055303767 +0000 UTC m=+2187.993762625" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.423000 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.503141 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities\") pod \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.503584 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9lt2\" (UniqueName: \"kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2\") pod \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.503685 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content\") pod \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\" (UID: \"9f7235f1-e7ba-47f5-a767-cae8cf01a08a\") " Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.506882 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities" (OuterVolumeSpecName: "utilities") pod "9f7235f1-e7ba-47f5-a767-cae8cf01a08a" (UID: "9f7235f1-e7ba-47f5-a767-cae8cf01a08a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.518415 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2" (OuterVolumeSpecName: "kube-api-access-q9lt2") pod "9f7235f1-e7ba-47f5-a767-cae8cf01a08a" (UID: "9f7235f1-e7ba-47f5-a767-cae8cf01a08a"). InnerVolumeSpecName "kube-api-access-q9lt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.520159 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f7235f1-e7ba-47f5-a767-cae8cf01a08a" (UID: "9f7235f1-e7ba-47f5-a767-cae8cf01a08a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.605726 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.605759 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9lt2\" (UniqueName: \"kubernetes.io/projected/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-kube-api-access-q9lt2\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:06 crc kubenswrapper[4603]: I0930 20:23:06.605769 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f7235f1-e7ba-47f5-a767-cae8cf01a08a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.003279 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerStarted","Data":"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c"} Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.007292 4603 generic.go:334] "Generic (PLEG): container finished" podID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerID="8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d" exitCode=0 Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.007349 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndgh6" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.007395 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerDied","Data":"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d"} Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.007474 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndgh6" event={"ID":"9f7235f1-e7ba-47f5-a767-cae8cf01a08a","Type":"ContainerDied","Data":"43f25e9185885faaba1ffffdc2511128a1c6aef568abf65ddb4b6b5cc1b19e01"} Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.007513 4603 scope.go:117] "RemoveContainer" containerID="8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.029557 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v8srs" podStartSLOduration=2.294651838 podStartE2EDuration="5.029538151s" podCreationTimestamp="2025-09-30 20:23:02 +0000 UTC" firstStartedPulling="2025-09-30 20:23:03.941027314 +0000 UTC m=+2185.879486132" lastFinishedPulling="2025-09-30 20:23:06.675913627 +0000 UTC m=+2188.614372445" observedRunningTime="2025-09-30 20:23:07.022200218 +0000 UTC m=+2188.960659026" watchObservedRunningTime="2025-09-30 20:23:07.029538151 +0000 UTC m=+2188.967996969" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.035240 4603 scope.go:117] "RemoveContainer" containerID="5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.050861 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.060329 4603 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-ndgh6"] Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.062331 4603 scope.go:117] "RemoveContainer" containerID="07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.080430 4603 scope.go:117] "RemoveContainer" containerID="8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d" Sep 30 20:23:07 crc kubenswrapper[4603]: E0930 20:23:07.081016 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d\": container with ID starting with 8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d not found: ID does not exist" containerID="8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.081065 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d"} err="failed to get container status \"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d\": rpc error: code = NotFound desc = could not find container \"8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d\": container with ID starting with 8a8d971ab7772b95c337e7a7395800d17d4f6abe11cf34c444b0e0e4b124318d not found: ID does not exist" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.081090 4603 scope.go:117] "RemoveContainer" containerID="5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b" Sep 30 20:23:07 crc kubenswrapper[4603]: E0930 20:23:07.081631 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b\": container with ID starting with 5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b not found: ID does not exist" containerID="5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.081662 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b"} err="failed to get container status \"5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b\": rpc error: code = NotFound desc = could not find container \"5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b\": container with ID starting with 5f5008155bdd1d27c5591b67b0959602a1be1a8b1994617d825cfa5a381c605b not found: ID does not exist" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.081679 4603 scope.go:117] "RemoveContainer" containerID="07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c" Sep 30 20:23:07 crc kubenswrapper[4603]: E0930 20:23:07.082058 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c\": container with ID starting with 07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c not found: ID does not exist" containerID="07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c" Sep 30 20:23:07 crc kubenswrapper[4603]: I0930 20:23:07.082092 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c"} err="failed to get container status \"07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c\": rpc error: code = NotFound desc = could not find container \"07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c\": container with ID starting with 07f35c4c6b33cd444cea4f89792f7c4151a397368a44060a9b8c2d58b5c8210c not found: ID does not exist" Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.450570 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.450827 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.450885 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.451837 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.451952 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" gracePeriod=600 Sep 30 20:23:08 crc kubenswrapper[4603]: E0930 20:23:08.573249 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:23:08 crc kubenswrapper[4603]: I0930 20:23:08.774380 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" path="/var/lib/kubelet/pods/9f7235f1-e7ba-47f5-a767-cae8cf01a08a/volumes" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.046372 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" exitCode=0 Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.046472 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" 
event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976"} Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.046711 4603 scope.go:117] "RemoveContainer" containerID="22930ca4117afa3acc57f12491a4d3bb2f191047f1295f93b865d61fac9f143e" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.047280 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:23:09 crc kubenswrapper[4603]: E0930 20:23:09.047510 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.790290 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:09 crc kubenswrapper[4603]: E0930 20:23:09.790848 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="extract-content" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.790871 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="extract-content" Sep 30 20:23:09 crc kubenswrapper[4603]: E0930 20:23:09.790912 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="extract-utilities" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.790923 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="extract-utilities" Sep 30 20:23:09 crc kubenswrapper[4603]: E0930 20:23:09.790956 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="registry-server" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.790968 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="registry-server" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.791282 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7235f1-e7ba-47f5-a767-cae8cf01a08a" containerName="registry-server" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.794746 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.809760 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.882224 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhm6r\" (UniqueName: \"kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.882291 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.882452 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.984896 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.985033 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhm6r\" (UniqueName: \"kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.985078 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.985424 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:09 crc kubenswrapper[4603]: I0930 20:23:09.985510 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:10 crc kubenswrapper[4603]: I0930 20:23:10.020455 4603 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lhm6r\" (UniqueName: \"kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r\") pod \"certified-operators-sg7nz\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:10 crc kubenswrapper[4603]: I0930 20:23:10.115278 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:10 crc kubenswrapper[4603]: I0930 20:23:10.678241 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:11 crc kubenswrapper[4603]: I0930 20:23:11.088091 4603 generic.go:334] "Generic (PLEG): container finished" podID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerID="7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098" exitCode=0 Sep 30 20:23:11 crc kubenswrapper[4603]: I0930 20:23:11.088217 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerDied","Data":"7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098"} Sep 30 20:23:11 crc kubenswrapper[4603]: I0930 20:23:11.088553 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerStarted","Data":"f2b0bff26b940a37be5270f5918c06267930ed31dde628a5dbd5bac0681a8598"} Sep 30 20:23:12 crc kubenswrapper[4603]: I0930 20:23:12.940743 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:12 crc kubenswrapper[4603]: I0930 20:23:12.941098 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:13 crc kubenswrapper[4603]: I0930 20:23:13.025922 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:13 crc kubenswrapper[4603]: I0930 20:23:13.107539 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerStarted","Data":"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f"} Sep 30 20:23:13 crc kubenswrapper[4603]: I0930 20:23:13.159303 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:14 crc kubenswrapper[4603]: I0930 20:23:14.124929 4603 generic.go:334] "Generic (PLEG): container finished" podID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerID="503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f" exitCode=0 Sep 30 20:23:14 crc kubenswrapper[4603]: I0930 20:23:14.125038 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerDied","Data":"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f"} Sep 30 20:23:15 crc kubenswrapper[4603]: I0930 20:23:15.139359 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" 
event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerStarted","Data":"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa"} Sep 30 20:23:15 crc kubenswrapper[4603]: I0930 20:23:15.158683 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sg7nz" podStartSLOduration=2.7116517 podStartE2EDuration="6.158662609s" podCreationTimestamp="2025-09-30 20:23:09 +0000 UTC" firstStartedPulling="2025-09-30 20:23:11.090706591 +0000 UTC m=+2193.029165409" lastFinishedPulling="2025-09-30 20:23:14.53771746 +0000 UTC m=+2196.476176318" observedRunningTime="2025-09-30 20:23:15.155486121 +0000 UTC m=+2197.093944949" watchObservedRunningTime="2025-09-30 20:23:15.158662609 +0000 UTC m=+2197.097121427" Sep 30 20:23:15 crc kubenswrapper[4603]: I0930 20:23:15.377118 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:15 crc kubenswrapper[4603]: I0930 20:23:15.377371 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v8srs" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="registry-server" containerID="cri-o://28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c" gracePeriod=2 Sep 30 20:23:15 crc kubenswrapper[4603]: E0930 20:23:15.531276 4603 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8297faee_8163_41b8_bef6_aa11e4f2f1f3.slice/crio-conmon-28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:23:15 crc kubenswrapper[4603]: I0930 20:23:15.941479 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.050223 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-utilities\") pod \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.050372 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfm2x\" (UniqueName: \"kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x\") pod \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.050406 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content\") pod \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\" (UID: \"8297faee-8163-41b8-bef6-aa11e4f2f1f3\") " Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.050990 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-utilities" (OuterVolumeSpecName: "utilities") pod "8297faee-8163-41b8-bef6-aa11e4f2f1f3" (UID: "8297faee-8163-41b8-bef6-aa11e4f2f1f3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.059447 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x" (OuterVolumeSpecName: "kube-api-access-qfm2x") pod "8297faee-8163-41b8-bef6-aa11e4f2f1f3" (UID: "8297faee-8163-41b8-bef6-aa11e4f2f1f3"). InnerVolumeSpecName "kube-api-access-qfm2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.150442 4603 generic.go:334] "Generic (PLEG): container finished" podID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerID="28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c" exitCode=0 Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.151515 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8srs" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.152008 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerDied","Data":"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c"} Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.152042 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8srs" event={"ID":"8297faee-8163-41b8-bef6-aa11e4f2f1f3","Type":"ContainerDied","Data":"55764a44a9615e14039b00e10c7ec26cd5c6d079b459272c02177a94933a7d0a"} Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.152064 4603 scope.go:117] "RemoveContainer" containerID="28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.156678 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.156709 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfm2x\" (UniqueName: \"kubernetes.io/projected/8297faee-8163-41b8-bef6-aa11e4f2f1f3-kube-api-access-qfm2x\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.174344 4603 scope.go:117] "RemoveContainer" containerID="a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.196364 4603 scope.go:117] "RemoveContainer" containerID="6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.238027 4603 scope.go:117] "RemoveContainer" containerID="28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c" Sep 30 20:23:16 crc kubenswrapper[4603]: E0930 20:23:16.238493 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c\": container with ID starting with 28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c not found: ID does not exist" containerID="28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.238523 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c"} err="failed to get container status \"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c\": rpc error: code = NotFound desc = could not find container \"28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c\": container with ID starting with 28a7565294ef22e9bc921dd991f8eb9d41837891ac9639d46c457c130f01523c not found: ID does not exist" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.238544 4603 scope.go:117] "RemoveContainer" containerID="a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c" Sep 30 20:23:16 crc kubenswrapper[4603]: E0930 20:23:16.239125 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c\": container with ID starting with a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c not found: ID does not exist" containerID="a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.239152 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c"} err="failed to get container status \"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c\": rpc error: code = NotFound desc = could not find container \"a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c\": container with ID starting with a72c07b2e8b93e679d731daf9158196ebf10605cb35c4a8e272fbb48eda5689c not found: ID does not exist" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.239191 4603 scope.go:117] "RemoveContainer" containerID="6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f" Sep 30 20:23:16 crc kubenswrapper[4603]: E0930 20:23:16.239651 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f\": container with ID starting with 6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f not found: ID does not exist" containerID="6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f" Sep 30 20:23:16 crc kubenswrapper[4603]: I0930 20:23:16.239673 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f"} err="failed to get container status \"6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f\": rpc error: code = NotFound desc = could not find container \"6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f\": container with ID starting with 6a517f673b469bfa59ee095f14a45c7bd28ff609b40e6dd998b609f6225da26f not found: ID does not exist" Sep 30 20:23:17 crc kubenswrapper[4603]: I0930 20:23:17.074850 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8297faee-8163-41b8-bef6-aa11e4f2f1f3" (UID: "8297faee-8163-41b8-bef6-aa11e4f2f1f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:17 crc kubenswrapper[4603]: I0930 20:23:17.175330 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8297faee-8163-41b8-bef6-aa11e4f2f1f3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:17 crc kubenswrapper[4603]: I0930 20:23:17.384881 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:17 crc kubenswrapper[4603]: I0930 20:23:17.393504 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v8srs"] Sep 30 20:23:18 crc kubenswrapper[4603]: I0930 20:23:18.779586 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" path="/var/lib/kubelet/pods/8297faee-8163-41b8-bef6-aa11e4f2f1f3/volumes" Sep 30 20:23:20 crc kubenswrapper[4603]: I0930 20:23:20.115824 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:20 crc kubenswrapper[4603]: I0930 20:23:20.116154 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:20 crc kubenswrapper[4603]: I0930 20:23:20.157885 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:20 crc kubenswrapper[4603]: I0930 20:23:20.245529 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:20 crc kubenswrapper[4603]: I0930 20:23:20.574869 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.214775 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sg7nz" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="registry-server" containerID="cri-o://cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa" gracePeriod=2 Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.694314 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.878993 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhm6r\" (UniqueName: \"kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r\") pod \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.879115 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities\") pod \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.879293 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content\") pod \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\" (UID: \"9b4dd387-5494-4ff5-bf4e-95eb61741f0c\") " Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.879963 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities" (OuterVolumeSpecName: "utilities") pod "9b4dd387-5494-4ff5-bf4e-95eb61741f0c" (UID: "9b4dd387-5494-4ff5-bf4e-95eb61741f0c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.886736 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r" (OuterVolumeSpecName: "kube-api-access-lhm6r") pod "9b4dd387-5494-4ff5-bf4e-95eb61741f0c" (UID: "9b4dd387-5494-4ff5-bf4e-95eb61741f0c"). InnerVolumeSpecName "kube-api-access-lhm6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.943394 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b4dd387-5494-4ff5-bf4e-95eb61741f0c" (UID: "9b4dd387-5494-4ff5-bf4e-95eb61741f0c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.982672 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhm6r\" (UniqueName: \"kubernetes.io/projected/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-kube-api-access-lhm6r\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.982751 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:22 crc kubenswrapper[4603]: I0930 20:23:22.982778 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b4dd387-5494-4ff5-bf4e-95eb61741f0c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.224694 4603 generic.go:334] "Generic (PLEG): container finished" podID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerID="cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa" exitCode=0 Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.225932 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerDied","Data":"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa"} Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.226115 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sg7nz" event={"ID":"9b4dd387-5494-4ff5-bf4e-95eb61741f0c","Type":"ContainerDied","Data":"f2b0bff26b940a37be5270f5918c06267930ed31dde628a5dbd5bac0681a8598"} Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.226085 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sg7nz" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.226197 4603 scope.go:117] "RemoveContainer" containerID="cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.255070 4603 scope.go:117] "RemoveContainer" containerID="503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.284203 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.293643 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sg7nz"] Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.301491 4603 scope.go:117] "RemoveContainer" containerID="7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.347217 4603 scope.go:117] "RemoveContainer" containerID="cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa" Sep 30 20:23:23 crc kubenswrapper[4603]: E0930 20:23:23.348891 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa\": container with ID starting with cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa not found: ID does not exist" containerID="cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.348934 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa"} err="failed to get container status \"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa\": rpc error: code = NotFound desc = could not find container \"cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa\": container with ID starting with cf60c2534f698f7e8a921fb1acfd2fc8a2847155852d9a70c5e8e47fdd2796fa not found: ID does not exist" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.348961 4603 scope.go:117] "RemoveContainer" containerID="503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f" Sep 30 20:23:23 crc kubenswrapper[4603]: E0930 20:23:23.349542 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f\": container with ID starting with 503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f not found: ID does not exist" containerID="503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.349577 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f"} err="failed to get container status \"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f\": rpc error: code = NotFound desc = could not find container \"503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f\": container with ID starting with 503edf7ec0eb15d11cd04b3b959d71de8fc4f8369e5e646ebc079f95da5df25f not found: ID does not exist" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.349596 4603 scope.go:117] "RemoveContainer" 
containerID="7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098" Sep 30 20:23:23 crc kubenswrapper[4603]: E0930 20:23:23.349886 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098\": container with ID starting with 7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098 not found: ID does not exist" containerID="7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.349922 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098"} err="failed to get container status \"7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098\": rpc error: code = NotFound desc = could not find container \"7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098\": container with ID starting with 7ee1bec41ac1c41d0bdcc4f26600fb9f1a583e24c5cc23e5149f678cb838c098 not found: ID does not exist" Sep 30 20:23:23 crc kubenswrapper[4603]: I0930 20:23:23.765234 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:23:23 crc kubenswrapper[4603]: E0930 20:23:23.766240 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:23:24 crc kubenswrapper[4603]: I0930 20:23:24.791012 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" path="/var/lib/kubelet/pods/9b4dd387-5494-4ff5-bf4e-95eb61741f0c/volumes" Sep 30 20:23:37 crc kubenswrapper[4603]: I0930 20:23:37.765742 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:23:37 crc kubenswrapper[4603]: E0930 20:23:37.766478 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:23:51 crc kubenswrapper[4603]: I0930 20:23:51.764527 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:23:51 crc kubenswrapper[4603]: E0930 20:23:51.765407 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:24:05 crc kubenswrapper[4603]: I0930 20:24:05.765298 4603 scope.go:117] "RemoveContainer" 
containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:24:05 crc kubenswrapper[4603]: E0930 20:24:05.765991 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:24:16 crc kubenswrapper[4603]: I0930 20:24:16.764570 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:24:16 crc kubenswrapper[4603]: E0930 20:24:16.765232 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:24:20 crc kubenswrapper[4603]: I0930 20:24:20.790800 4603 generic.go:334] "Generic (PLEG): container finished" podID="09cd14dc-05cd-4a02-adde-bd6cc7b55643" containerID="11127943c242731cf6aebf0948735aac09bac019b293b71ba31b74e25a4be904" exitCode=0 Sep 30 20:24:20 crc kubenswrapper[4603]: I0930 20:24:20.790873 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" event={"ID":"09cd14dc-05cd-4a02-adde-bd6cc7b55643","Type":"ContainerDied","Data":"11127943c242731cf6aebf0948735aac09bac019b293b71ba31b74e25a4be904"} Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.235986 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.370486 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg2g9\" (UniqueName: \"kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9\") pod \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.370528 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0\") pod \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.370649 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key\") pod \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.370709 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle\") pod \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.370742 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory\") pod \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\" (UID: \"09cd14dc-05cd-4a02-adde-bd6cc7b55643\") " Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.377132 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9" (OuterVolumeSpecName: "kube-api-access-hg2g9") pod "09cd14dc-05cd-4a02-adde-bd6cc7b55643" (UID: "09cd14dc-05cd-4a02-adde-bd6cc7b55643"). InnerVolumeSpecName "kube-api-access-hg2g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.377486 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "09cd14dc-05cd-4a02-adde-bd6cc7b55643" (UID: "09cd14dc-05cd-4a02-adde-bd6cc7b55643"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.409515 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory" (OuterVolumeSpecName: "inventory") pod "09cd14dc-05cd-4a02-adde-bd6cc7b55643" (UID: "09cd14dc-05cd-4a02-adde-bd6cc7b55643"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.413413 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "09cd14dc-05cd-4a02-adde-bd6cc7b55643" (UID: "09cd14dc-05cd-4a02-adde-bd6cc7b55643"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.435179 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "09cd14dc-05cd-4a02-adde-bd6cc7b55643" (UID: "09cd14dc-05cd-4a02-adde-bd6cc7b55643"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.472857 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg2g9\" (UniqueName: \"kubernetes.io/projected/09cd14dc-05cd-4a02-adde-bd6cc7b55643-kube-api-access-hg2g9\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.472891 4603 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.472903 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.472918 4603 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.472933 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/09cd14dc-05cd-4a02-adde-bd6cc7b55643-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.816362 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" event={"ID":"09cd14dc-05cd-4a02-adde-bd6cc7b55643","Type":"ContainerDied","Data":"f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310"} Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.816385 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-gdhjt" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.816398 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6e882bbe43c66e6dcdbb970a650e1fb2a449a222424c467c98987fde2929310" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902475 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5"] Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902825 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902836 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902853 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="extract-utilities" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902860 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="extract-utilities" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902870 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="extract-utilities" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902876 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="extract-utilities" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902889 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="extract-content" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902895 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="extract-content" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902920 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cd14dc-05cd-4a02-adde-bd6cc7b55643" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902926 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cd14dc-05cd-4a02-adde-bd6cc7b55643" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902935 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902940 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: E0930 20:24:22.902952 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="extract-content" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.902957 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="extract-content" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.903135 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cd14dc-05cd-4a02-adde-bd6cc7b55643" 
containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.903150 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b4dd387-5494-4ff5-bf4e-95eb61741f0c" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.903173 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="8297faee-8163-41b8-bef6-aa11e4f2f1f3" containerName="registry-server" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.903860 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.907031 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.907113 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.907127 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.907032 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.907239 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.915184 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5"] Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.957615 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982130 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982194 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982216 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982328 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-j2v7f\" (UniqueName: \"kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982418 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:22 crc kubenswrapper[4603]: I0930 20:24:22.982440 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.084590 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.085036 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.085073 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.085110 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.085151 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: 
\"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.085274 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2v7f\" (UniqueName: \"kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.089220 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.089381 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.089600 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.090101 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.091570 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.107781 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2v7f\" (UniqueName: \"kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.262966 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:24:23 crc kubenswrapper[4603]: I0930 20:24:23.846989 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5"] Sep 30 20:24:24 crc kubenswrapper[4603]: I0930 20:24:24.849905 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" event={"ID":"404b3593-d4d1-4440-a645-8669f3676f09","Type":"ContainerStarted","Data":"f77f7fe93eb7766265bd7800a7190cc203b0fcd3b1e3450462ee1811cb09ebfb"} Sep 30 20:24:25 crc kubenswrapper[4603]: I0930 20:24:25.860399 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" event={"ID":"404b3593-d4d1-4440-a645-8669f3676f09","Type":"ContainerStarted","Data":"5fff900c5ff20044ce4974f65ac92e8daafa86798f8e0f30db513610d9ed17fc"} Sep 30 20:24:25 crc kubenswrapper[4603]: I0930 20:24:25.888069 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" podStartSLOduration=3.008569352 podStartE2EDuration="3.888047602s" podCreationTimestamp="2025-09-30 20:24:22 +0000 UTC" firstStartedPulling="2025-09-30 20:24:23.856728075 +0000 UTC m=+2265.795186893" lastFinishedPulling="2025-09-30 20:24:24.736206295 +0000 UTC m=+2266.674665143" observedRunningTime="2025-09-30 20:24:25.887192459 +0000 UTC m=+2267.825651277" watchObservedRunningTime="2025-09-30 20:24:25.888047602 +0000 UTC m=+2267.826506420" Sep 30 20:24:30 crc kubenswrapper[4603]: I0930 20:24:30.764489 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:24:30 crc kubenswrapper[4603]: E0930 20:24:30.765673 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:24:43 crc kubenswrapper[4603]: I0930 20:24:43.764807 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:24:43 crc kubenswrapper[4603]: E0930 20:24:43.765519 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:24:54 crc kubenswrapper[4603]: I0930 20:24:54.767510 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:24:54 crc kubenswrapper[4603]: E0930 20:24:54.768493 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:25:07 crc kubenswrapper[4603]: I0930 20:25:07.764736 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:25:07 crc kubenswrapper[4603]: E0930 20:25:07.765557 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:25:21 crc kubenswrapper[4603]: I0930 20:25:21.465696 4603 generic.go:334] "Generic (PLEG): container finished" podID="404b3593-d4d1-4440-a645-8669f3676f09" containerID="5fff900c5ff20044ce4974f65ac92e8daafa86798f8e0f30db513610d9ed17fc" exitCode=0 Sep 30 20:25:21 crc kubenswrapper[4603]: I0930 20:25:21.465802 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" event={"ID":"404b3593-d4d1-4440-a645-8669f3676f09","Type":"ContainerDied","Data":"5fff900c5ff20044ce4974f65ac92e8daafa86798f8e0f30db513610d9ed17fc"} Sep 30 20:25:21 crc kubenswrapper[4603]: I0930 20:25:21.764007 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:25:21 crc kubenswrapper[4603]: E0930 20:25:21.764324 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.848135 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.973371 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2v7f\" (UniqueName: \"kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.973440 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.973472 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.973581 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.974344 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.974378 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle\") pod \"404b3593-d4d1-4440-a645-8669f3676f09\" (UID: \"404b3593-d4d1-4440-a645-8669f3676f09\") " Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.986053 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f" (OuterVolumeSpecName: "kube-api-access-j2v7f") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "kube-api-access-j2v7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:25:22 crc kubenswrapper[4603]: I0930 20:25:22.993868 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.006067 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.006132 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory" (OuterVolumeSpecName: "inventory") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.008735 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.025941 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "404b3593-d4d1-4440-a645-8669f3676f09" (UID: "404b3593-d4d1-4440-a645-8669f3676f09"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077274 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2v7f\" (UniqueName: \"kubernetes.io/projected/404b3593-d4d1-4440-a645-8669f3676f09-kube-api-access-j2v7f\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077324 4603 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077342 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077356 4603 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077368 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.077381 4603 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/404b3593-d4d1-4440-a645-8669f3676f09-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.487502 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" event={"ID":"404b3593-d4d1-4440-a645-8669f3676f09","Type":"ContainerDied","Data":"f77f7fe93eb7766265bd7800a7190cc203b0fcd3b1e3450462ee1811cb09ebfb"} Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.487929 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f77f7fe93eb7766265bd7800a7190cc203b0fcd3b1e3450462ee1811cb09ebfb" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.487626 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.637967 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h"] Sep 30 20:25:23 crc kubenswrapper[4603]: E0930 20:25:23.638642 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="404b3593-d4d1-4440-a645-8669f3676f09" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.638766 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="404b3593-d4d1-4440-a645-8669f3676f09" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.639143 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="404b3593-d4d1-4440-a645-8669f3676f09" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.639990 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.644341 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.644675 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.644741 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.644815 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.645625 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.672227 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h"] Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.690323 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.690591 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.690751 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.690951 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq69g\" (UniqueName: \"kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.691075 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.793832 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.794181 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq69g\" (UniqueName: \"kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.794258 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.794294 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.794331 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.813826 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.814635 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.814958 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.817885 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.824661 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq69g\" (UniqueName: \"kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:23 crc kubenswrapper[4603]: I0930 20:25:23.964469 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:25:24 crc kubenswrapper[4603]: I0930 20:25:24.507528 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h"] Sep 30 20:25:25 crc kubenswrapper[4603]: I0930 20:25:25.512985 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" event={"ID":"fbf155fd-4bef-49a0-8bf0-eb16974f5e89","Type":"ContainerStarted","Data":"90cdc403ab9bdcfab9781926db9ea2b0399d1df4e5cab2533c0803c6908c6fbf"} Sep 30 20:25:25 crc kubenswrapper[4603]: I0930 20:25:25.513271 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" event={"ID":"fbf155fd-4bef-49a0-8bf0-eb16974f5e89","Type":"ContainerStarted","Data":"d348d7729cafb2e4affaf2ba73466cbb78846c9eaeb4e46e253003b3e45f1efa"} Sep 30 20:25:25 crc kubenswrapper[4603]: I0930 20:25:25.540431 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" podStartSLOduration=2.363008192 podStartE2EDuration="2.540412219s" podCreationTimestamp="2025-09-30 20:25:23 +0000 UTC" firstStartedPulling="2025-09-30 20:25:24.513944423 +0000 UTC m=+2326.452403251" lastFinishedPulling="2025-09-30 20:25:24.69134845 +0000 UTC m=+2326.629807278" observedRunningTime="2025-09-30 20:25:25.531406491 +0000 UTC m=+2327.469865309" watchObservedRunningTime="2025-09-30 20:25:25.540412219 +0000 UTC m=+2327.478871037" Sep 30 20:25:35 crc kubenswrapper[4603]: I0930 20:25:35.764992 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:25:35 crc kubenswrapper[4603]: E0930 20:25:35.765750 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:25:46 crc kubenswrapper[4603]: I0930 20:25:46.764716 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:25:46 crc kubenswrapper[4603]: E0930 20:25:46.765464 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:25:57 crc kubenswrapper[4603]: I0930 20:25:57.764691 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:25:57 crc kubenswrapper[4603]: E0930 20:25:57.765397 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:26:09 crc kubenswrapper[4603]: I0930 20:26:09.764498 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:26:09 crc kubenswrapper[4603]: E0930 20:26:09.765263 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:26:20 crc kubenswrapper[4603]: I0930 20:26:20.764818 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:26:20 crc kubenswrapper[4603]: E0930 20:26:20.765714 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:26:31 crc kubenswrapper[4603]: I0930 20:26:31.764940 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:26:31 crc kubenswrapper[4603]: E0930 20:26:31.765936 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:26:44 crc kubenswrapper[4603]: I0930 20:26:44.766075 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:26:44 crc kubenswrapper[4603]: E0930 20:26:44.766912 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:26:55 crc kubenswrapper[4603]: I0930 20:26:55.765109 4603 
scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:26:55 crc kubenswrapper[4603]: E0930 20:26:55.765858 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:27:08 crc kubenswrapper[4603]: I0930 20:27:08.786781 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:27:08 crc kubenswrapper[4603]: E0930 20:27:08.788070 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:27:21 crc kubenswrapper[4603]: I0930 20:27:21.763955 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:27:21 crc kubenswrapper[4603]: E0930 20:27:21.764899 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:27:36 crc kubenswrapper[4603]: I0930 20:27:36.766153 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:27:36 crc kubenswrapper[4603]: E0930 20:27:36.769502 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:27:48 crc kubenswrapper[4603]: I0930 20:27:48.770839 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:27:48 crc kubenswrapper[4603]: E0930 20:27:48.771628 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:28:03 crc kubenswrapper[4603]: I0930 20:28:03.764336 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:28:03 crc kubenswrapper[4603]: E0930 20:28:03.765387 4603 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:28:18 crc kubenswrapper[4603]: I0930 20:28:18.766344 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:28:19 crc kubenswrapper[4603]: I0930 20:28:19.267328 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9"} Sep 30 20:28:52 crc kubenswrapper[4603]: I0930 20:28:52.748914 4603 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-kmwk4 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 20:28:52 crc kubenswrapper[4603]: I0930 20:28:52.749635 4603 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-kmwk4" podUID="3b3a59ee-ec27-4879-9a3b-e7004d4394d9" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.176810 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5"] Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.179066 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.186811 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.189115 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.190482 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5"] Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.232737 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.233099 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4s27\" (UniqueName: \"kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.233224 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.335543 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4s27\" (UniqueName: \"kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.335795 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.335926 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.337884 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume\") pod 
\"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.353109 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4s27\" (UniqueName: \"kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.359084 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume\") pod \"collect-profiles-29321070-dlnb5\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.504928 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:00 crc kubenswrapper[4603]: I0930 20:30:00.982054 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5"] Sep 30 20:30:01 crc kubenswrapper[4603]: I0930 20:30:01.310041 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" event={"ID":"e7efceb0-c8e0-4158-a53f-f6ff648341c3","Type":"ContainerStarted","Data":"a033824345e3f649e97585d5927ed83162bb17e126725242802a58929098ff8e"} Sep 30 20:30:01 crc kubenswrapper[4603]: I0930 20:30:01.310346 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" event={"ID":"e7efceb0-c8e0-4158-a53f-f6ff648341c3","Type":"ContainerStarted","Data":"99e0d7f65c3071ba1fd87a9aa4116a1110716681eaa8ae29d2396853327cf3ee"} Sep 30 20:30:01 crc kubenswrapper[4603]: I0930 20:30:01.328711 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" podStartSLOduration=1.328695268 podStartE2EDuration="1.328695268s" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:01.326110247 +0000 UTC m=+2603.264569065" watchObservedRunningTime="2025-09-30 20:30:01.328695268 +0000 UTC m=+2603.267154086" Sep 30 20:30:02 crc kubenswrapper[4603]: I0930 20:30:02.323444 4603 generic.go:334] "Generic (PLEG): container finished" podID="e7efceb0-c8e0-4158-a53f-f6ff648341c3" containerID="a033824345e3f649e97585d5927ed83162bb17e126725242802a58929098ff8e" exitCode=0 Sep 30 20:30:02 crc kubenswrapper[4603]: I0930 20:30:02.323494 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" event={"ID":"e7efceb0-c8e0-4158-a53f-f6ff648341c3","Type":"ContainerDied","Data":"a033824345e3f649e97585d5927ed83162bb17e126725242802a58929098ff8e"} Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.646846 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.805818 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume\") pod \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.806458 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4s27\" (UniqueName: \"kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27\") pod \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.806546 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume\") pod \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\" (UID: \"e7efceb0-c8e0-4158-a53f-f6ff648341c3\") " Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.807176 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume" (OuterVolumeSpecName: "config-volume") pod "e7efceb0-c8e0-4158-a53f-f6ff648341c3" (UID: "e7efceb0-c8e0-4158-a53f-f6ff648341c3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.812861 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27" (OuterVolumeSpecName: "kube-api-access-x4s27") pod "e7efceb0-c8e0-4158-a53f-f6ff648341c3" (UID: "e7efceb0-c8e0-4158-a53f-f6ff648341c3"). InnerVolumeSpecName "kube-api-access-x4s27". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.814622 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e7efceb0-c8e0-4158-a53f-f6ff648341c3" (UID: "e7efceb0-c8e0-4158-a53f-f6ff648341c3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.909078 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4s27\" (UniqueName: \"kubernetes.io/projected/e7efceb0-c8e0-4158-a53f-f6ff648341c3-kube-api-access-x4s27\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.909108 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7efceb0-c8e0-4158-a53f-f6ff648341c3-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:03 crc kubenswrapper[4603]: I0930 20:30:03.909119 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7efceb0-c8e0-4158-a53f-f6ff648341c3-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.342752 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" event={"ID":"e7efceb0-c8e0-4158-a53f-f6ff648341c3","Type":"ContainerDied","Data":"99e0d7f65c3071ba1fd87a9aa4116a1110716681eaa8ae29d2396853327cf3ee"} Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.343361 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99e0d7f65c3071ba1fd87a9aa4116a1110716681eaa8ae29d2396853327cf3ee" Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.342916 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5" Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.724081 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"] Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.732154 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321025-fvp2j"] Sep 30 20:30:04 crc kubenswrapper[4603]: I0930 20:30:04.776130 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4db563e4-c2e0-43d9-bc1c-0961bca8cf09" path="/var/lib/kubelet/pods/4db563e4-c2e0-43d9-bc1c-0961bca8cf09/volumes" Sep 30 20:30:23 crc kubenswrapper[4603]: I0930 20:30:23.541331 4603 generic.go:334] "Generic (PLEG): container finished" podID="fbf155fd-4bef-49a0-8bf0-eb16974f5e89" containerID="90cdc403ab9bdcfab9781926db9ea2b0399d1df4e5cab2533c0803c6908c6fbf" exitCode=0 Sep 30 20:30:23 crc kubenswrapper[4603]: I0930 20:30:23.541403 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" event={"ID":"fbf155fd-4bef-49a0-8bf0-eb16974f5e89","Type":"ContainerDied","Data":"90cdc403ab9bdcfab9781926db9ea2b0399d1df4e5cab2533c0803c6908c6fbf"} Sep 30 20:30:24 crc kubenswrapper[4603]: I0930 20:30:24.977741 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.062594 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle\") pod \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.062700 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key\") pod \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.062796 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0\") pod \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.062821 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory\") pod \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.062864 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq69g\" (UniqueName: \"kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g\") pod \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\" (UID: \"fbf155fd-4bef-49a0-8bf0-eb16974f5e89\") " Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.068188 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g" (OuterVolumeSpecName: "kube-api-access-rq69g") pod "fbf155fd-4bef-49a0-8bf0-eb16974f5e89" (UID: "fbf155fd-4bef-49a0-8bf0-eb16974f5e89"). InnerVolumeSpecName "kube-api-access-rq69g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.085629 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "fbf155fd-4bef-49a0-8bf0-eb16974f5e89" (UID: "fbf155fd-4bef-49a0-8bf0-eb16974f5e89"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.093409 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fbf155fd-4bef-49a0-8bf0-eb16974f5e89" (UID: "fbf155fd-4bef-49a0-8bf0-eb16974f5e89"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.094582 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "fbf155fd-4bef-49a0-8bf0-eb16974f5e89" (UID: "fbf155fd-4bef-49a0-8bf0-eb16974f5e89"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.095831 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory" (OuterVolumeSpecName: "inventory") pod "fbf155fd-4bef-49a0-8bf0-eb16974f5e89" (UID: "fbf155fd-4bef-49a0-8bf0-eb16974f5e89"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.165386 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.165450 4603 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.165465 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.165475 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq69g\" (UniqueName: \"kubernetes.io/projected/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-kube-api-access-rq69g\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.165485 4603 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbf155fd-4bef-49a0-8bf0-eb16974f5e89-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.562239 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" event={"ID":"fbf155fd-4bef-49a0-8bf0-eb16974f5e89","Type":"ContainerDied","Data":"d348d7729cafb2e4affaf2ba73466cbb78846c9eaeb4e46e253003b3e45f1efa"} Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.562283 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d348d7729cafb2e4affaf2ba73466cbb78846c9eaeb4e46e253003b3e45f1efa" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.562358 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.642443 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx"] Sep 30 20:30:25 crc kubenswrapper[4603]: E0930 20:30:25.642829 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf155fd-4bef-49a0-8bf0-eb16974f5e89" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.642846 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf155fd-4bef-49a0-8bf0-eb16974f5e89" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:30:25 crc kubenswrapper[4603]: E0930 20:30:25.642893 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7efceb0-c8e0-4158-a53f-f6ff648341c3" containerName="collect-profiles" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.642900 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7efceb0-c8e0-4158-a53f-f6ff648341c3" containerName="collect-profiles" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.643066 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7efceb0-c8e0-4158-a53f-f6ff648341c3" containerName="collect-profiles" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.643085 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf155fd-4bef-49a0-8bf0-eb16974f5e89" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.643688 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.646178 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.646228 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.646377 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.657745 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx"] Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.657857 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.657863 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.658855 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.659335 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.775620 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: 
\"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.775769 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.775822 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.775949 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.776032 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.776116 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.776254 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwv7l\" (UniqueName: \"kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.776347 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.776420 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878107 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878149 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878209 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878242 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878275 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878347 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwv7l\" (UniqueName: \"kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878416 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878453 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.878494 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.879906 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.883500 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.884420 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.884504 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.885338 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.886068 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.887601 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.889375 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.898367 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwv7l\" (UniqueName: \"kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tkcxx\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:25 crc kubenswrapper[4603]: I0930 20:30:25.960158 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:30:26 crc kubenswrapper[4603]: I0930 20:30:26.503618 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:30:26 crc kubenswrapper[4603]: I0930 20:30:26.505531 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx"] Sep 30 20:30:26 crc kubenswrapper[4603]: I0930 20:30:26.572984 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" event={"ID":"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9","Type":"ContainerStarted","Data":"b8e7095cd505e71810c56cf8d0cb5a89fe84cc8d269df84da5e3730da36d0ab0"} Sep 30 20:30:27 crc kubenswrapper[4603]: I0930 20:30:27.588617 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" event={"ID":"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9","Type":"ContainerStarted","Data":"f3554588e65c051e3b4a5c74972a4988bd86e528e7919cfa56e3d217945ac284"} Sep 30 20:30:27 crc kubenswrapper[4603]: I0930 20:30:27.623442 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" podStartSLOduration=2.468681801 podStartE2EDuration="2.623416332s" podCreationTimestamp="2025-09-30 20:30:25 +0000 UTC" firstStartedPulling="2025-09-30 20:30:26.503326615 +0000 UTC m=+2628.441785433" lastFinishedPulling="2025-09-30 20:30:26.658061146 +0000 UTC m=+2628.596519964" observedRunningTime="2025-09-30 20:30:27.614596938 +0000 UTC m=+2629.553055776" watchObservedRunningTime="2025-09-30 20:30:27.623416332 +0000 UTC m=+2629.561875160" Sep 30 20:30:38 crc kubenswrapper[4603]: I0930 20:30:38.442130 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:30:38 crc kubenswrapper[4603]: I0930 20:30:38.442946 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:30:46 
crc kubenswrapper[4603]: I0930 20:30:46.453809 4603 scope.go:117] "RemoveContainer" containerID="037a89d66a349dee24f94b7d9e9e69664e188515820a458257236d6ba0437a15" Sep 30 20:31:08 crc kubenswrapper[4603]: I0930 20:31:08.441996 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:31:08 crc kubenswrapper[4603]: I0930 20:31:08.442602 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.541851 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.546325 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.562288 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.601767 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2z2l\" (UniqueName: \"kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.601832 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.601957 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.703630 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.703779 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2z2l\" (UniqueName: \"kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.703819 4603 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.704337 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.704606 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.721938 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2z2l\" (UniqueName: \"kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l\") pod \"redhat-operators-bxtp5\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:11 crc kubenswrapper[4603]: I0930 20:31:11.879049 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:12 crc kubenswrapper[4603]: I0930 20:31:12.372498 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:12 crc kubenswrapper[4603]: I0930 20:31:12.998637 4603 generic.go:334] "Generic (PLEG): container finished" podID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerID="61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e" exitCode=0 Sep 30 20:31:12 crc kubenswrapper[4603]: I0930 20:31:12.998831 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerDied","Data":"61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e"} Sep 30 20:31:12 crc kubenswrapper[4603]: I0930 20:31:12.999052 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerStarted","Data":"7c4defc05b8b644cdf70fc17a2ade910ee4656bf178e038b742ffa998f865be5"} Sep 30 20:31:15 crc kubenswrapper[4603]: I0930 20:31:15.018268 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerStarted","Data":"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0"} Sep 30 20:31:16 crc kubenswrapper[4603]: I0930 20:31:16.030906 4603 generic.go:334] "Generic (PLEG): container finished" podID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerID="a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0" exitCode=0 Sep 30 20:31:16 crc kubenswrapper[4603]: I0930 20:31:16.030965 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" 
event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerDied","Data":"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0"} Sep 30 20:31:20 crc kubenswrapper[4603]: I0930 20:31:20.081025 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerStarted","Data":"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4"} Sep 30 20:31:20 crc kubenswrapper[4603]: I0930 20:31:20.108698 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bxtp5" podStartSLOduration=2.3480861060000002 podStartE2EDuration="9.108676887s" podCreationTimestamp="2025-09-30 20:31:11 +0000 UTC" firstStartedPulling="2025-09-30 20:31:13.000669264 +0000 UTC m=+2674.939128082" lastFinishedPulling="2025-09-30 20:31:19.761260015 +0000 UTC m=+2681.699718863" observedRunningTime="2025-09-30 20:31:20.105371576 +0000 UTC m=+2682.043830424" watchObservedRunningTime="2025-09-30 20:31:20.108676887 +0000 UTC m=+2682.047135715" Sep 30 20:31:21 crc kubenswrapper[4603]: I0930 20:31:21.880263 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:21 crc kubenswrapper[4603]: I0930 20:31:21.880637 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:22 crc kubenswrapper[4603]: I0930 20:31:22.932994 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bxtp5" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" probeResult="failure" output=< Sep 30 20:31:22 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:31:22 crc kubenswrapper[4603]: > Sep 30 20:31:32 crc kubenswrapper[4603]: I0930 20:31:32.936081 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bxtp5" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" probeResult="failure" output=< Sep 30 20:31:32 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:31:32 crc kubenswrapper[4603]: > Sep 30 20:31:38 crc kubenswrapper[4603]: I0930 20:31:38.441113 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:31:38 crc kubenswrapper[4603]: I0930 20:31:38.441671 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:38 crc kubenswrapper[4603]: I0930 20:31:38.441713 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:31:38 crc kubenswrapper[4603]: I0930 20:31:38.442433 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9"} 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:31:38 crc kubenswrapper[4603]: I0930 20:31:38.442508 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9" gracePeriod=600 Sep 30 20:31:39 crc kubenswrapper[4603]: I0930 20:31:39.304622 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9" exitCode=0 Sep 30 20:31:39 crc kubenswrapper[4603]: I0930 20:31:39.304705 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9"} Sep 30 20:31:39 crc kubenswrapper[4603]: I0930 20:31:39.305277 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe"} Sep 30 20:31:39 crc kubenswrapper[4603]: I0930 20:31:39.305328 4603 scope.go:117] "RemoveContainer" containerID="617bc4798f487e7f8c0fc4fafa83e7a57f614d81c7a093cdcc476338c41f9976" Sep 30 20:31:41 crc kubenswrapper[4603]: I0930 20:31:41.918150 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-9cfbc4c69-xfx2z" podUID="6d92de7a-d198-431c-a00c-bf93f63890c0" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 502" Sep 30 20:31:41 crc kubenswrapper[4603]: I0930 20:31:41.945568 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:42 crc kubenswrapper[4603]: I0930 20:31:42.008287 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:42 crc kubenswrapper[4603]: I0930 20:31:42.718007 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.348284 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bxtp5" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" containerID="cri-o://5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4" gracePeriod=2 Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.764477 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.838311 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content\") pod \"d80da3df-f70c-43f2-b806-2ea118a77abc\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.842305 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2z2l\" (UniqueName: \"kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l\") pod \"d80da3df-f70c-43f2-b806-2ea118a77abc\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.842461 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities\") pod \"d80da3df-f70c-43f2-b806-2ea118a77abc\" (UID: \"d80da3df-f70c-43f2-b806-2ea118a77abc\") " Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.844578 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities" (OuterVolumeSpecName: "utilities") pod "d80da3df-f70c-43f2-b806-2ea118a77abc" (UID: "d80da3df-f70c-43f2-b806-2ea118a77abc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.849301 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l" (OuterVolumeSpecName: "kube-api-access-m2z2l") pod "d80da3df-f70c-43f2-b806-2ea118a77abc" (UID: "d80da3df-f70c-43f2-b806-2ea118a77abc"). InnerVolumeSpecName "kube-api-access-m2z2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.937967 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d80da3df-f70c-43f2-b806-2ea118a77abc" (UID: "d80da3df-f70c-43f2-b806-2ea118a77abc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.944599 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.944623 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d80da3df-f70c-43f2-b806-2ea118a77abc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4603]: I0930 20:31:43.944654 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2z2l\" (UniqueName: \"kubernetes.io/projected/d80da3df-f70c-43f2-b806-2ea118a77abc-kube-api-access-m2z2l\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.360552 4603 generic.go:334] "Generic (PLEG): container finished" podID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerID="5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4" exitCode=0 Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.360730 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerDied","Data":"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4"} Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.360797 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxtp5" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.361003 4603 scope.go:117] "RemoveContainer" containerID="5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.360983 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxtp5" event={"ID":"d80da3df-f70c-43f2-b806-2ea118a77abc","Type":"ContainerDied","Data":"7c4defc05b8b644cdf70fc17a2ade910ee4656bf178e038b742ffa998f865be5"} Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.396327 4603 scope.go:117] "RemoveContainer" containerID="a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.397573 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.408691 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bxtp5"] Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.427148 4603 scope.go:117] "RemoveContainer" containerID="61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.493146 4603 scope.go:117] "RemoveContainer" containerID="5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4" Sep 30 20:31:44 crc kubenswrapper[4603]: E0930 20:31:44.493727 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4\": container with ID starting with 5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4 not found: ID does not exist" containerID="5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.493789 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4"} err="failed to get container status \"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4\": rpc error: code = NotFound desc = could not find container \"5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4\": container with ID starting with 5f6b4f69aea2fc28cb5758562ea95fc0f68c4a8271781833a0543262c0fcfbb4 not found: ID does not exist" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.493820 4603 scope.go:117] "RemoveContainer" containerID="a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0" Sep 30 20:31:44 crc kubenswrapper[4603]: E0930 20:31:44.494303 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0\": container with ID starting with a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0 not found: ID does not exist" containerID="a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.494342 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0"} err="failed to get container status \"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0\": rpc error: code = NotFound desc = could not find container \"a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0\": container with ID starting with a6bd0f36ab3dc97a35cecbf7fcfe51cb9607fae74d41241cb878317834f732f0 not found: ID does not exist" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.494372 4603 scope.go:117] "RemoveContainer" containerID="61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e" Sep 30 20:31:44 crc kubenswrapper[4603]: E0930 20:31:44.495518 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e\": container with ID starting with 61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e not found: ID does not exist" containerID="61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.495554 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e"} err="failed to get container status \"61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e\": rpc error: code = NotFound desc = could not find container \"61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e\": container with ID starting with 61e7aa500d875af15bf4ddb98f7f602b5b5f92f00ec756afb9e5c6e60499e68e not found: ID does not exist" Sep 30 20:31:44 crc kubenswrapper[4603]: I0930 20:31:44.781286 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" path="/var/lib/kubelet/pods/d80da3df-f70c-43f2-b806-2ea118a77abc/volumes" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.710300 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:12 crc kubenswrapper[4603]: E0930 20:33:12.711053 4603 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="extract-utilities" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.711067 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="extract-utilities" Sep 30 20:33:12 crc kubenswrapper[4603]: E0930 20:33:12.711086 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.711094 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" Sep 30 20:33:12 crc kubenswrapper[4603]: E0930 20:33:12.711120 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="extract-content" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.711126 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="extract-content" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.711332 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80da3df-f70c-43f2-b806-2ea118a77abc" containerName="registry-server" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.712547 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.743847 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.770575 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.770617 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h76zt\" (UniqueName: \"kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.770674 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.871931 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.872226 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities\") pod 
\"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.872255 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h76zt\" (UniqueName: \"kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.872473 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.873275 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:12 crc kubenswrapper[4603]: I0930 20:33:12.894488 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h76zt\" (UniqueName: \"kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt\") pod \"community-operators-fm84p\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:13 crc kubenswrapper[4603]: I0930 20:33:13.038075 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:13 crc kubenswrapper[4603]: I0930 20:33:13.680375 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:14 crc kubenswrapper[4603]: I0930 20:33:14.264531 4603 generic.go:334] "Generic (PLEG): container finished" podID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerID="ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b" exitCode=0 Sep 30 20:33:14 crc kubenswrapper[4603]: I0930 20:33:14.264587 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerDied","Data":"ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b"} Sep 30 20:33:14 crc kubenswrapper[4603]: I0930 20:33:14.264620 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerStarted","Data":"119d364b8086f3bb90aa3e7e0870b3494fc80481b3d3008573e1319ab5e854e9"} Sep 30 20:33:16 crc kubenswrapper[4603]: I0930 20:33:16.283177 4603 generic.go:334] "Generic (PLEG): container finished" podID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerID="2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605" exitCode=0 Sep 30 20:33:16 crc kubenswrapper[4603]: I0930 20:33:16.283376 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerDied","Data":"2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605"} Sep 30 20:33:17 crc kubenswrapper[4603]: I0930 20:33:17.295159 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerStarted","Data":"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d"} Sep 30 20:33:17 crc kubenswrapper[4603]: I0930 20:33:17.322922 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fm84p" podStartSLOduration=2.525813934 podStartE2EDuration="5.322901639s" podCreationTimestamp="2025-09-30 20:33:12 +0000 UTC" firstStartedPulling="2025-09-30 20:33:14.26771674 +0000 UTC m=+2796.206175568" lastFinishedPulling="2025-09-30 20:33:17.064804445 +0000 UTC m=+2799.003263273" observedRunningTime="2025-09-30 20:33:17.316386682 +0000 UTC m=+2799.254845500" watchObservedRunningTime="2025-09-30 20:33:17.322901639 +0000 UTC m=+2799.261360457" Sep 30 20:33:23 crc kubenswrapper[4603]: I0930 20:33:23.038581 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:23 crc kubenswrapper[4603]: I0930 20:33:23.039111 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:23 crc kubenswrapper[4603]: I0930 20:33:23.117050 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:23 crc kubenswrapper[4603]: I0930 20:33:23.419662 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:23 crc kubenswrapper[4603]: I0930 20:33:23.465552 4603 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:25 crc kubenswrapper[4603]: I0930 20:33:25.367685 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fm84p" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="registry-server" containerID="cri-o://b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d" gracePeriod=2 Sep 30 20:33:25 crc kubenswrapper[4603]: I0930 20:33:25.831425 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.025093 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities\") pod \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.025293 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content\") pod \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.025343 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h76zt\" (UniqueName: \"kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt\") pod \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\" (UID: \"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a\") " Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.026243 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities" (OuterVolumeSpecName: "utilities") pod "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" (UID: "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.026654 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.037447 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt" (OuterVolumeSpecName: "kube-api-access-h76zt") pod "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" (UID: "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a"). InnerVolumeSpecName "kube-api-access-h76zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.128686 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h76zt\" (UniqueName: \"kubernetes.io/projected/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-kube-api-access-h76zt\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.240635 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" (UID: "81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.334067 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.376572 4603 generic.go:334] "Generic (PLEG): container finished" podID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerID="b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d" exitCode=0 Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.376798 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerDied","Data":"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d"} Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.377694 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fm84p" event={"ID":"81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a","Type":"ContainerDied","Data":"119d364b8086f3bb90aa3e7e0870b3494fc80481b3d3008573e1319ab5e854e9"} Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.377817 4603 scope.go:117] "RemoveContainer" containerID="b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.376871 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fm84p" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.404613 4603 scope.go:117] "RemoveContainer" containerID="2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.415067 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.424397 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fm84p"] Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.448322 4603 scope.go:117] "RemoveContainer" containerID="ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.490219 4603 scope.go:117] "RemoveContainer" containerID="b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d" Sep 30 20:33:26 crc kubenswrapper[4603]: E0930 20:33:26.490682 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d\": container with ID starting with b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d not found: ID does not exist" containerID="b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.490721 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d"} err="failed to get container status \"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d\": rpc error: code = NotFound desc = could not find container \"b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d\": container with ID starting with b5c2f9a99b23bae52d7d68a479a010172f19a839a1b63af9dd9e9df4f23a477d not found: ID does not exist" Sep 30 
20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.490748 4603 scope.go:117] "RemoveContainer" containerID="2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605" Sep 30 20:33:26 crc kubenswrapper[4603]: E0930 20:33:26.491240 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605\": container with ID starting with 2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605 not found: ID does not exist" containerID="2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.491261 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605"} err="failed to get container status \"2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605\": rpc error: code = NotFound desc = could not find container \"2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605\": container with ID starting with 2cc7ec2017d07b3bd3977e3deadc803c4323da4a7d255ab0a4056942c1bc7605 not found: ID does not exist" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.491275 4603 scope.go:117] "RemoveContainer" containerID="ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b" Sep 30 20:33:26 crc kubenswrapper[4603]: E0930 20:33:26.491855 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b\": container with ID starting with ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b not found: ID does not exist" containerID="ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.491875 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b"} err="failed to get container status \"ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b\": rpc error: code = NotFound desc = could not find container \"ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b\": container with ID starting with ec3eb5469fa7b16b62ab1e1f57cf1ae39b6f2d0b549e0522dc29921fce1b924b not found: ID does not exist" Sep 30 20:33:26 crc kubenswrapper[4603]: I0930 20:33:26.789216 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" path="/var/lib/kubelet/pods/81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a/volumes" Sep 30 20:33:38 crc kubenswrapper[4603]: I0930 20:33:38.441447 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:33:38 crc kubenswrapper[4603]: I0930 20:33:38.441990 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:33:46 crc kubenswrapper[4603]: I0930 20:33:46.996884 4603 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:33:46 crc kubenswrapper[4603]: E0930 20:33:46.997937 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="extract-utilities" Sep 30 20:33:46 crc kubenswrapper[4603]: I0930 20:33:46.997954 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="extract-utilities" Sep 30 20:33:46 crc kubenswrapper[4603]: E0930 20:33:46.997982 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="extract-content" Sep 30 20:33:46 crc kubenswrapper[4603]: I0930 20:33:46.997990 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="extract-content" Sep 30 20:33:46 crc kubenswrapper[4603]: E0930 20:33:46.998019 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="registry-server" Sep 30 20:33:46 crc kubenswrapper[4603]: I0930 20:33:46.998030 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="registry-server" Sep 30 20:33:46 crc kubenswrapper[4603]: I0930 20:33:46.998305 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="81be94e3-dcdc-4d2a-ba20-bf5aee79cc8a" containerName="registry-server" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.000044 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.031079 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.096450 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fs2s\" (UniqueName: \"kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.096575 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.096655 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.198696 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fs2s\" (UniqueName: \"kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 
20:33:47.198758 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.198795 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.199292 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.199381 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.221664 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fs2s\" (UniqueName: \"kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s\") pod \"certified-operators-lsw8s\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.320537 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:47 crc kubenswrapper[4603]: I0930 20:33:47.851791 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:33:48 crc kubenswrapper[4603]: I0930 20:33:48.604565 4603 generic.go:334] "Generic (PLEG): container finished" podID="83ac5f5c-a558-4671-8857-6612baf310e4" containerID="437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7" exitCode=0 Sep 30 20:33:48 crc kubenswrapper[4603]: I0930 20:33:48.604665 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerDied","Data":"437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7"} Sep 30 20:33:48 crc kubenswrapper[4603]: I0930 20:33:48.604950 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerStarted","Data":"ac52079600158f035b8f8d04d379127aeffe9e05fde21ce08223cf44487627b3"} Sep 30 20:33:50 crc kubenswrapper[4603]: I0930 20:33:50.623308 4603 generic.go:334] "Generic (PLEG): container finished" podID="83ac5f5c-a558-4671-8857-6612baf310e4" containerID="a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08" exitCode=0 Sep 30 20:33:50 crc kubenswrapper[4603]: I0930 20:33:50.623658 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerDied","Data":"a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08"} Sep 30 20:33:51 crc kubenswrapper[4603]: I0930 20:33:51.634375 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerStarted","Data":"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c"} Sep 30 20:33:51 crc kubenswrapper[4603]: I0930 20:33:51.667860 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lsw8s" podStartSLOduration=3.150815429 podStartE2EDuration="5.66783176s" podCreationTimestamp="2025-09-30 20:33:46 +0000 UTC" firstStartedPulling="2025-09-30 20:33:48.606803382 +0000 UTC m=+2830.545262200" lastFinishedPulling="2025-09-30 20:33:51.123819713 +0000 UTC m=+2833.062278531" observedRunningTime="2025-09-30 20:33:51.653485448 +0000 UTC m=+2833.591944306" watchObservedRunningTime="2025-09-30 20:33:51.66783176 +0000 UTC m=+2833.606290608" Sep 30 20:33:57 crc kubenswrapper[4603]: I0930 20:33:57.321398 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:57 crc kubenswrapper[4603]: I0930 20:33:57.321858 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:57 crc kubenswrapper[4603]: I0930 20:33:57.393474 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:57 crc kubenswrapper[4603]: I0930 20:33:57.747260 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:33:57 crc kubenswrapper[4603]: I0930 20:33:57.798935 4603 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:33:59 crc kubenswrapper[4603]: I0930 20:33:59.704995 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lsw8s" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="registry-server" containerID="cri-o://f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c" gracePeriod=2 Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.204767 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.351078 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities\") pod \"83ac5f5c-a558-4671-8857-6612baf310e4\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.351196 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content\") pod \"83ac5f5c-a558-4671-8857-6612baf310e4\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.351252 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fs2s\" (UniqueName: \"kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s\") pod \"83ac5f5c-a558-4671-8857-6612baf310e4\" (UID: \"83ac5f5c-a558-4671-8857-6612baf310e4\") " Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.363560 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s" (OuterVolumeSpecName: "kube-api-access-8fs2s") pod "83ac5f5c-a558-4671-8857-6612baf310e4" (UID: "83ac5f5c-a558-4671-8857-6612baf310e4"). InnerVolumeSpecName "kube-api-access-8fs2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.370017 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities" (OuterVolumeSpecName: "utilities") pod "83ac5f5c-a558-4671-8857-6612baf310e4" (UID: "83ac5f5c-a558-4671-8857-6612baf310e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.413757 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83ac5f5c-a558-4671-8857-6612baf310e4" (UID: "83ac5f5c-a558-4671-8857-6612baf310e4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.453491 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.453526 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fs2s\" (UniqueName: \"kubernetes.io/projected/83ac5f5c-a558-4671-8857-6612baf310e4-kube-api-access-8fs2s\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.453540 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ac5f5c-a558-4671-8857-6612baf310e4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.716451 4603 generic.go:334] "Generic (PLEG): container finished" podID="83ac5f5c-a558-4671-8857-6612baf310e4" containerID="f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c" exitCode=0 Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.716490 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerDied","Data":"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c"} Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.716515 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsw8s" event={"ID":"83ac5f5c-a558-4671-8857-6612baf310e4","Type":"ContainerDied","Data":"ac52079600158f035b8f8d04d379127aeffe9e05fde21ce08223cf44487627b3"} Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.716531 4603 scope.go:117] "RemoveContainer" containerID="f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.716544 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lsw8s" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.740216 4603 scope.go:117] "RemoveContainer" containerID="a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.780518 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.782587 4603 scope.go:117] "RemoveContainer" containerID="437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.786354 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lsw8s"] Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.833699 4603 scope.go:117] "RemoveContainer" containerID="f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c" Sep 30 20:34:00 crc kubenswrapper[4603]: E0930 20:34:00.834111 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c\": container with ID starting with f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c not found: ID does not exist" containerID="f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.834218 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c"} err="failed to get container status \"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c\": rpc error: code = NotFound desc = could not find container \"f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c\": container with ID starting with f4ef50f87e13c9f53191d8a3a5f69e68ed4a66111ea6f3b2b81fbd8683b8396c not found: ID does not exist" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.834304 4603 scope.go:117] "RemoveContainer" containerID="a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08" Sep 30 20:34:00 crc kubenswrapper[4603]: E0930 20:34:00.834734 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08\": container with ID starting with a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08 not found: ID does not exist" containerID="a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.834813 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08"} err="failed to get container status \"a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08\": rpc error: code = NotFound desc = could not find container \"a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08\": container with ID starting with a405db1ec3b3425442f03addd7f2d80a3d49931be1f6be2808287b7d307bfe08 not found: ID does not exist" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.834884 4603 scope.go:117] "RemoveContainer" containerID="437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7" Sep 30 20:34:00 crc kubenswrapper[4603]: E0930 20:34:00.835206 4603 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7\": container with ID starting with 437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7 not found: ID does not exist" containerID="437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7" Sep 30 20:34:00 crc kubenswrapper[4603]: I0930 20:34:00.835287 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7"} err="failed to get container status \"437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7\": rpc error: code = NotFound desc = could not find container \"437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7\": container with ID starting with 437deb7302d0e874ac7340fb2fc60c04aea0fe1b3dfce739bc1200202c8dbfb7 not found: ID does not exist" Sep 30 20:34:02 crc kubenswrapper[4603]: I0930 20:34:02.779514 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" path="/var/lib/kubelet/pods/83ac5f5c-a558-4671-8857-6612baf310e4/volumes" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.048818 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:03 crc kubenswrapper[4603]: E0930 20:34:03.049198 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="extract-content" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.049214 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="extract-content" Sep 30 20:34:03 crc kubenswrapper[4603]: E0930 20:34:03.049229 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="extract-utilities" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.049235 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="extract-utilities" Sep 30 20:34:03 crc kubenswrapper[4603]: E0930 20:34:03.049254 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="registry-server" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.049260 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="registry-server" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.049426 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ac5f5c-a558-4671-8857-6612baf310e4" containerName="registry-server" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.050844 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.096227 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.206713 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.206820 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.206866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j9m7\" (UniqueName: \"kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.308533 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.308620 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j9m7\" (UniqueName: \"kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.308709 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.309084 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.309152 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.346897 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9j9m7\" (UniqueName: \"kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7\") pod \"redhat-marketplace-d9n6c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.386335 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.672737 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:03 crc kubenswrapper[4603]: I0930 20:34:03.748344 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerStarted","Data":"0d87843b397f772f9b6fcb0667063b3e21c659cef6bec95a2f954e19715d8f91"} Sep 30 20:34:04 crc kubenswrapper[4603]: I0930 20:34:04.760788 4603 generic.go:334] "Generic (PLEG): container finished" podID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerID="d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1" exitCode=0 Sep 30 20:34:04 crc kubenswrapper[4603]: I0930 20:34:04.760858 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerDied","Data":"d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1"} Sep 30 20:34:06 crc kubenswrapper[4603]: I0930 20:34:06.780761 4603 generic.go:334] "Generic (PLEG): container finished" podID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerID="876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791" exitCode=0 Sep 30 20:34:06 crc kubenswrapper[4603]: I0930 20:34:06.780808 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerDied","Data":"876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791"} Sep 30 20:34:07 crc kubenswrapper[4603]: I0930 20:34:07.818982 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerStarted","Data":"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc"} Sep 30 20:34:07 crc kubenswrapper[4603]: I0930 20:34:07.841615 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d9n6c" podStartSLOduration=2.22831249 podStartE2EDuration="4.84159689s" podCreationTimestamp="2025-09-30 20:34:03 +0000 UTC" firstStartedPulling="2025-09-30 20:34:04.762978171 +0000 UTC m=+2846.701437009" lastFinishedPulling="2025-09-30 20:34:07.376262561 +0000 UTC m=+2849.314721409" observedRunningTime="2025-09-30 20:34:07.835246247 +0000 UTC m=+2849.773705065" watchObservedRunningTime="2025-09-30 20:34:07.84159689 +0000 UTC m=+2849.780055708" Sep 30 20:34:08 crc kubenswrapper[4603]: I0930 20:34:08.441714 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:34:08 crc kubenswrapper[4603]: I0930 20:34:08.441767 4603 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:34:13 crc kubenswrapper[4603]: I0930 20:34:13.386546 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:13 crc kubenswrapper[4603]: I0930 20:34:13.387145 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:13 crc kubenswrapper[4603]: I0930 20:34:13.449536 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:13 crc kubenswrapper[4603]: I0930 20:34:13.916517 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:13 crc kubenswrapper[4603]: I0930 20:34:13.958483 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:15 crc kubenswrapper[4603]: I0930 20:34:15.898594 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d9n6c" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="registry-server" containerID="cri-o://0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc" gracePeriod=2 Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.402014 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.525106 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities\") pod \"ec4379a1-8e81-4599-8829-d887468a3d5c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.525196 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9j9m7\" (UniqueName: \"kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7\") pod \"ec4379a1-8e81-4599-8829-d887468a3d5c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.525414 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content\") pod \"ec4379a1-8e81-4599-8829-d887468a3d5c\" (UID: \"ec4379a1-8e81-4599-8829-d887468a3d5c\") " Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.526457 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities" (OuterVolumeSpecName: "utilities") pod "ec4379a1-8e81-4599-8829-d887468a3d5c" (UID: "ec4379a1-8e81-4599-8829-d887468a3d5c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.534311 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7" (OuterVolumeSpecName: "kube-api-access-9j9m7") pod "ec4379a1-8e81-4599-8829-d887468a3d5c" (UID: "ec4379a1-8e81-4599-8829-d887468a3d5c"). InnerVolumeSpecName "kube-api-access-9j9m7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.542700 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec4379a1-8e81-4599-8829-d887468a3d5c" (UID: "ec4379a1-8e81-4599-8829-d887468a3d5c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.640185 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.640339 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4379a1-8e81-4599-8829-d887468a3d5c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.640400 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9j9m7\" (UniqueName: \"kubernetes.io/projected/ec4379a1-8e81-4599-8829-d887468a3d5c-kube-api-access-9j9m7\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.912105 4603 generic.go:334] "Generic (PLEG): container finished" podID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerID="0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc" exitCode=0 Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.912488 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerDied","Data":"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc"} Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.912527 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d9n6c" event={"ID":"ec4379a1-8e81-4599-8829-d887468a3d5c","Type":"ContainerDied","Data":"0d87843b397f772f9b6fcb0667063b3e21c659cef6bec95a2f954e19715d8f91"} Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.912552 4603 scope.go:117] "RemoveContainer" containerID="0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.912728 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d9n6c" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.918684 4603 generic.go:334] "Generic (PLEG): container finished" podID="cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" containerID="f3554588e65c051e3b4a5c74972a4988bd86e528e7919cfa56e3d217945ac284" exitCode=0 Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.918729 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" event={"ID":"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9","Type":"ContainerDied","Data":"f3554588e65c051e3b4a5c74972a4988bd86e528e7919cfa56e3d217945ac284"} Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.943030 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.945159 4603 scope.go:117] "RemoveContainer" containerID="876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791" Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.960722 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d9n6c"] Sep 30 20:34:16 crc kubenswrapper[4603]: I0930 20:34:16.985705 4603 scope.go:117] "RemoveContainer" containerID="d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.017957 4603 scope.go:117] "RemoveContainer" containerID="0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc" Sep 30 20:34:17 crc kubenswrapper[4603]: E0930 20:34:17.018443 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc\": container with ID starting with 0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc not found: ID does not exist" containerID="0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.018478 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc"} err="failed to get container status \"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc\": rpc error: code = NotFound desc = could not find container \"0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc\": container with ID starting with 0fe4cf7d4112ddedd7a302bbf8ba0b0fa6288f0454941a84b34912dd255beddc not found: ID does not exist" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.018504 4603 scope.go:117] "RemoveContainer" containerID="876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791" Sep 30 20:34:17 crc kubenswrapper[4603]: E0930 20:34:17.018991 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791\": container with ID starting with 876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791 not found: ID does not exist" containerID="876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.019020 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791"} err="failed to get container status 
\"876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791\": rpc error: code = NotFound desc = could not find container \"876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791\": container with ID starting with 876bac29c2899ba5053dde96f06e3d7fd7eebb2b1356ce24be03bfc794800791 not found: ID does not exist" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.019059 4603 scope.go:117] "RemoveContainer" containerID="d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1" Sep 30 20:34:17 crc kubenswrapper[4603]: E0930 20:34:17.019369 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1\": container with ID starting with d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1 not found: ID does not exist" containerID="d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1" Sep 30 20:34:17 crc kubenswrapper[4603]: I0930 20:34:17.019392 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1"} err="failed to get container status \"d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1\": rpc error: code = NotFound desc = could not find container \"d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1\": container with ID starting with d0ed0927d9696cffc2193b1f82c1b44a18eeeb518fa5efd5c13eca60a36006a1 not found: ID does not exist" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.357599 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384059 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384268 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384397 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384597 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384716 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: 
\"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384800 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384862 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwv7l\" (UniqueName: \"kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384895 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.384955 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle\") pod \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\" (UID: \"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9\") " Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.428029 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l" (OuterVolumeSpecName: "kube-api-access-mwv7l") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "kube-api-access-mwv7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.429377 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.442797 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.445920 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.449941 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.469853 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.479296 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.483035 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.484397 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory" (OuterVolumeSpecName: "inventory") pod "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" (UID: "cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.496946 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.496977 4603 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.496986 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwv7l\" (UniqueName: \"kubernetes.io/projected/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-kube-api-access-mwv7l\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.496998 4603 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.497009 4603 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.497019 4603 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.497032 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.497045 4603 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.497054 4603 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.775104 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" path="/var/lib/kubelet/pods/ec4379a1-8e81-4599-8829-d887468a3d5c/volumes" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.938567 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" event={"ID":"cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9","Type":"ContainerDied","Data":"b8e7095cd505e71810c56cf8d0cb5a89fe84cc8d269df84da5e3730da36d0ab0"} Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.938604 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8e7095cd505e71810c56cf8d0cb5a89fe84cc8d269df84da5e3730da36d0ab0" Sep 30 20:34:18 crc kubenswrapper[4603]: I0930 20:34:18.938621 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tkcxx" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.088090 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq"] Sep 30 20:34:19 crc kubenswrapper[4603]: E0930 20:34:19.090668 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="extract-utilities" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.090695 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="extract-utilities" Sep 30 20:34:19 crc kubenswrapper[4603]: E0930 20:34:19.090729 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="registry-server" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.090739 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="registry-server" Sep 30 20:34:19 crc kubenswrapper[4603]: E0930 20:34:19.090764 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.090772 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:34:19 crc kubenswrapper[4603]: E0930 20:34:19.090808 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="extract-content" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.090816 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="extract-content" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.091061 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.091090 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec4379a1-8e81-4599-8829-d887468a3d5c" containerName="registry-server" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.091933 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.096506 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.096688 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.096791 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.097301 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x2cvh" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.097414 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.102968 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq"] Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.212020 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.212398 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.212598 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.212750 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86mpf\" (UniqueName: \"kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.212934 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 
20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.213008 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.213151 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.314771 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.314844 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.314907 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.314924 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86mpf\" (UniqueName: \"kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.314972 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.315002 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.315043 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.323501 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.323634 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.330633 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.339742 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.358774 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.360772 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.367918 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86mpf\" (UniqueName: \"kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-ltndq\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") 
" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:19 crc kubenswrapper[4603]: I0930 20:34:19.413616 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:34:20 crc kubenswrapper[4603]: I0930 20:34:20.016919 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq"] Sep 30 20:34:20 crc kubenswrapper[4603]: I0930 20:34:20.958110 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" event={"ID":"d4d2e2e5-2559-4ee2-801c-1c8d9917e367","Type":"ContainerStarted","Data":"4bacac8185be5c963e2ab2893e4fbabb4c0443dc1b77b3bfcedb6202c6b3a2a7"} Sep 30 20:34:20 crc kubenswrapper[4603]: I0930 20:34:20.958417 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" event={"ID":"d4d2e2e5-2559-4ee2-801c-1c8d9917e367","Type":"ContainerStarted","Data":"6c3ed684012a540e692c2deee94ab9c52a9e1b00daf281a6c8b1b1a5118dcbff"} Sep 30 20:34:20 crc kubenswrapper[4603]: I0930 20:34:20.986614 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" podStartSLOduration=1.799582507 podStartE2EDuration="1.986594621s" podCreationTimestamp="2025-09-30 20:34:19 +0000 UTC" firstStartedPulling="2025-09-30 20:34:20.029034138 +0000 UTC m=+2861.967492966" lastFinishedPulling="2025-09-30 20:34:20.216046262 +0000 UTC m=+2862.154505080" observedRunningTime="2025-09-30 20:34:20.979244701 +0000 UTC m=+2862.917703519" watchObservedRunningTime="2025-09-30 20:34:20.986594621 +0000 UTC m=+2862.925053439" Sep 30 20:34:38 crc kubenswrapper[4603]: I0930 20:34:38.441738 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:34:38 crc kubenswrapper[4603]: I0930 20:34:38.442388 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:34:38 crc kubenswrapper[4603]: I0930 20:34:38.442445 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:34:38 crc kubenswrapper[4603]: I0930 20:34:38.443433 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:34:38 crc kubenswrapper[4603]: I0930 20:34:38.443516 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" 
gracePeriod=600 Sep 30 20:34:38 crc kubenswrapper[4603]: E0930 20:34:38.566552 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:34:39 crc kubenswrapper[4603]: I0930 20:34:39.151912 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" exitCode=0 Sep 30 20:34:39 crc kubenswrapper[4603]: I0930 20:34:39.151980 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe"} Sep 30 20:34:39 crc kubenswrapper[4603]: I0930 20:34:39.152284 4603 scope.go:117] "RemoveContainer" containerID="9ddea93be93ee7cb7db7ec62f37c16c876f7152483c87ead249c0556758785a9" Sep 30 20:34:39 crc kubenswrapper[4603]: I0930 20:34:39.152961 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:34:39 crc kubenswrapper[4603]: E0930 20:34:39.153270 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:34:53 crc kubenswrapper[4603]: I0930 20:34:53.765320 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:34:53 crc kubenswrapper[4603]: E0930 20:34:53.768104 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:35:04 crc kubenswrapper[4603]: I0930 20:35:04.764143 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:35:04 crc kubenswrapper[4603]: E0930 20:35:04.764966 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:35:19 crc kubenswrapper[4603]: I0930 20:35:19.764647 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:35:19 crc kubenswrapper[4603]: E0930 20:35:19.766100 4603 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:35:34 crc kubenswrapper[4603]: I0930 20:35:34.764925 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:35:34 crc kubenswrapper[4603]: E0930 20:35:34.765946 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:35:49 crc kubenswrapper[4603]: I0930 20:35:49.764397 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:35:49 crc kubenswrapper[4603]: E0930 20:35:49.765132 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:36:01 crc kubenswrapper[4603]: I0930 20:36:01.764443 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:36:01 crc kubenswrapper[4603]: E0930 20:36:01.765263 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:36:12 crc kubenswrapper[4603]: I0930 20:36:12.764326 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:36:12 crc kubenswrapper[4603]: E0930 20:36:12.765136 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:36:23 crc kubenswrapper[4603]: I0930 20:36:23.763942 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:36:23 crc kubenswrapper[4603]: E0930 20:36:23.766062 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:36:36 crc kubenswrapper[4603]: I0930 20:36:36.764615 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:36:36 crc kubenswrapper[4603]: E0930 20:36:36.765392 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:36:50 crc kubenswrapper[4603]: I0930 20:36:50.764239 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:36:50 crc kubenswrapper[4603]: E0930 20:36:50.765186 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:37:03 crc kubenswrapper[4603]: I0930 20:37:03.764400 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:37:03 crc kubenswrapper[4603]: E0930 20:37:03.765489 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:37:14 crc kubenswrapper[4603]: I0930 20:37:14.764909 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:37:14 crc kubenswrapper[4603]: E0930 20:37:14.765713 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:37:29 crc kubenswrapper[4603]: I0930 20:37:29.764301 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:37:29 crc kubenswrapper[4603]: E0930 20:37:29.765036 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:37:42 crc kubenswrapper[4603]: I0930 20:37:42.764882 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:37:42 crc kubenswrapper[4603]: E0930 20:37:42.766060 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:37:53 crc kubenswrapper[4603]: I0930 20:37:53.763928 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:37:53 crc kubenswrapper[4603]: E0930 20:37:53.764700 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:38:04 crc kubenswrapper[4603]: I0930 20:38:04.764641 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:38:04 crc kubenswrapper[4603]: E0930 20:38:04.765295 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:38:16 crc kubenswrapper[4603]: I0930 20:38:16.200561 4603 generic.go:334] "Generic (PLEG): container finished" podID="d4d2e2e5-2559-4ee2-801c-1c8d9917e367" containerID="4bacac8185be5c963e2ab2893e4fbabb4c0443dc1b77b3bfcedb6202c6b3a2a7" exitCode=0 Sep 30 20:38:16 crc kubenswrapper[4603]: I0930 20:38:16.201114 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" event={"ID":"d4d2e2e5-2559-4ee2-801c-1c8d9917e367","Type":"ContainerDied","Data":"4bacac8185be5c963e2ab2893e4fbabb4c0443dc1b77b3bfcedb6202c6b3a2a7"} Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.680008 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822137 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86mpf\" (UniqueName: \"kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822204 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822241 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822270 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822313 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822389 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.822405 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory\") pod \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\" (UID: \"d4d2e2e5-2559-4ee2-801c-1c8d9917e367\") " Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.828285 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf" (OuterVolumeSpecName: "kube-api-access-86mpf") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "kube-api-access-86mpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.841645 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.853617 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.853729 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.859489 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.861362 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory" (OuterVolumeSpecName: "inventory") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.865410 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "d4d2e2e5-2559-4ee2-801c-1c8d9917e367" (UID: "d4d2e2e5-2559-4ee2-801c-1c8d9917e367"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927409 4603 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927446 4603 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927458 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86mpf\" (UniqueName: \"kubernetes.io/projected/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-kube-api-access-86mpf\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927468 4603 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927477 4603 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927485 4603 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:17 crc kubenswrapper[4603]: I0930 20:38:17.927493 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4d2e2e5-2559-4ee2-801c-1c8d9917e367-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:38:18 crc kubenswrapper[4603]: I0930 20:38:18.219470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" event={"ID":"d4d2e2e5-2559-4ee2-801c-1c8d9917e367","Type":"ContainerDied","Data":"6c3ed684012a540e692c2deee94ab9c52a9e1b00daf281a6c8b1b1a5118dcbff"} Sep 30 20:38:18 crc kubenswrapper[4603]: I0930 20:38:18.219510 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c3ed684012a540e692c2deee94ab9c52a9e1b00daf281a6c8b1b1a5118dcbff" Sep 30 20:38:18 crc kubenswrapper[4603]: I0930 20:38:18.219552 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-ltndq" Sep 30 20:38:18 crc kubenswrapper[4603]: I0930 20:38:18.771437 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:38:18 crc kubenswrapper[4603]: E0930 20:38:18.772469 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:38:23 crc kubenswrapper[4603]: E0930 20:38:23.764298 4603 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.129:39498->38.102.83.129:39427: write tcp 38.102.83.129:39498->38.102.83.129:39427: write: broken pipe Sep 30 20:38:32 crc kubenswrapper[4603]: I0930 20:38:32.764687 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:38:32 crc kubenswrapper[4603]: E0930 20:38:32.765312 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:38:45 crc kubenswrapper[4603]: I0930 20:38:45.764734 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:38:45 crc kubenswrapper[4603]: E0930 20:38:45.765591 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:38:57 crc kubenswrapper[4603]: I0930 20:38:57.764413 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:38:57 crc kubenswrapper[4603]: E0930 20:38:57.765113 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:39:12 crc kubenswrapper[4603]: I0930 20:39:12.764620 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:39:12 crc kubenswrapper[4603]: E0930 20:39:12.765530 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.073612 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 20:39:20 crc kubenswrapper[4603]: E0930 20:39:20.074473 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4d2e2e5-2559-4ee2-801c-1c8d9917e367" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.074488 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4d2e2e5-2559-4ee2-801c-1c8d9917e367" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.074651 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4d2e2e5-2559-4ee2-801c-1c8d9917e367" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.075240 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.077220 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.077554 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.078390 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nflcc" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.078780 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.098968 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150530 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150605 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150660 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150768 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150793 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150866 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150901 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4b5w\" (UniqueName: \"kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.150974 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.151090 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.252558 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.252867 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4b5w\" (UniqueName: \"kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253024 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253210 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253409 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253531 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253663 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253789 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.253893 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.255008 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.255894 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.256311 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.256535 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.264380 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.264556 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.271715 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.272063 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.272891 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4b5w\" (UniqueName: \"kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.307844 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " pod="openstack/tempest-tests-tempest" Sep 30 20:39:20 crc kubenswrapper[4603]: I0930 20:39:20.400001 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 20:39:21 crc kubenswrapper[4603]: I0930 20:39:20.833193 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 20:39:21 crc kubenswrapper[4603]: I0930 20:39:20.839999 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:39:21 crc kubenswrapper[4603]: I0930 20:39:20.865147 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2c3f4ced-b58c-409a-a046-b0803bdd6d44","Type":"ContainerStarted","Data":"ea1cd62fb6df6f5a9508d065e68bbb4043595159553e885a9f902899dd03be66"} Sep 30 20:39:25 crc kubenswrapper[4603]: I0930 20:39:25.764640 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:39:25 crc kubenswrapper[4603]: E0930 20:39:25.765391 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:39:37 crc kubenswrapper[4603]: I0930 20:39:37.764908 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:39:37 crc kubenswrapper[4603]: E0930 20:39:37.767105 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:39:52 crc kubenswrapper[4603]: I0930 20:39:52.765299 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:39:54 crc kubenswrapper[4603]: E0930 20:39:54.900018 4603 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 30 20:39:54 crc kubenswrapper[4603]: E0930 20:39:54.910454 4603 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k4b5w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(2c3f4ced-b58c-409a-a046-b0803bdd6d44): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:39:54 crc kubenswrapper[4603]: E0930 20:39:54.911650 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" Sep 30 20:39:55 crc kubenswrapper[4603]: I0930 20:39:55.186917 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb"} Sep 30 20:39:55 crc kubenswrapper[4603]: E0930 20:39:55.188812 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" Sep 30 20:40:07 crc kubenswrapper[4603]: I0930 20:40:07.242990 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 30 20:40:09 crc kubenswrapper[4603]: I0930 20:40:09.323724 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2c3f4ced-b58c-409a-a046-b0803bdd6d44","Type":"ContainerStarted","Data":"080a0cc939faad9507b5ecab3c26b2b8ebffe49a77db8508211f83897604b4f5"} Sep 30 20:40:09 crc kubenswrapper[4603]: I0930 20:40:09.346934 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.946926236 podStartE2EDuration="50.346911037s" podCreationTimestamp="2025-09-30 20:39:19 +0000 UTC" firstStartedPulling="2025-09-30 20:39:20.83980898 +0000 UTC m=+3162.778267798" lastFinishedPulling="2025-09-30 20:40:07.239793761 +0000 UTC m=+3209.178252599" observedRunningTime="2025-09-30 20:40:09.346878066 +0000 UTC m=+3211.285336914" watchObservedRunningTime="2025-09-30 20:40:09.346911037 +0000 UTC m=+3211.285369875" Sep 30 20:42:08 crc kubenswrapper[4603]: I0930 20:42:08.442023 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:42:08 crc kubenswrapper[4603]: I0930 20:42:08.442556 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.499832 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.503135 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.520005 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.673289 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.673929 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.674101 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98tmt\" (UniqueName: \"kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.775710 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.776197 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.776320 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.776330 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98tmt\" (UniqueName: \"kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.776640 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.810374 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-98tmt\" (UniqueName: \"kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt\") pod \"redhat-operators-mmz7v\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:26 crc kubenswrapper[4603]: I0930 20:42:26.828343 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:27 crc kubenswrapper[4603]: I0930 20:42:27.597815 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:42:27 crc kubenswrapper[4603]: I0930 20:42:27.644802 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerStarted","Data":"927157b707610c654706195aee1a8ac7db52559708f566c9651d0253e74c4db6"} Sep 30 20:42:28 crc kubenswrapper[4603]: I0930 20:42:28.655067 4603 generic.go:334] "Generic (PLEG): container finished" podID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerID="b7b00a03c659ffadd653f04d325e20c3e3d92c7d6ca541ce9cb28852571508d0" exitCode=0 Sep 30 20:42:28 crc kubenswrapper[4603]: I0930 20:42:28.655412 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerDied","Data":"b7b00a03c659ffadd653f04d325e20c3e3d92c7d6ca541ce9cb28852571508d0"} Sep 30 20:42:29 crc kubenswrapper[4603]: I0930 20:42:29.668442 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerStarted","Data":"ffa8f0a446617330badf89af26dd2b3aa0ad97979e4c3a35d0ce2d431d88e65f"} Sep 30 20:42:33 crc kubenswrapper[4603]: I0930 20:42:33.716784 4603 generic.go:334] "Generic (PLEG): container finished" podID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerID="ffa8f0a446617330badf89af26dd2b3aa0ad97979e4c3a35d0ce2d431d88e65f" exitCode=0 Sep 30 20:42:33 crc kubenswrapper[4603]: I0930 20:42:33.716851 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerDied","Data":"ffa8f0a446617330badf89af26dd2b3aa0ad97979e4c3a35d0ce2d431d88e65f"} Sep 30 20:42:34 crc kubenswrapper[4603]: I0930 20:42:34.729139 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerStarted","Data":"9deb7ac72232b2afbfac9fbd37d64d3d8ba075f453de36657e8d405ba00f5f51"} Sep 30 20:42:34 crc kubenswrapper[4603]: I0930 20:42:34.758569 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mmz7v" podStartSLOduration=3.194509421 podStartE2EDuration="8.758551679s" podCreationTimestamp="2025-09-30 20:42:26 +0000 UTC" firstStartedPulling="2025-09-30 20:42:28.657961075 +0000 UTC m=+3350.596419893" lastFinishedPulling="2025-09-30 20:42:34.222003333 +0000 UTC m=+3356.160462151" observedRunningTime="2025-09-30 20:42:34.747925375 +0000 UTC m=+3356.686384193" watchObservedRunningTime="2025-09-30 20:42:34.758551679 +0000 UTC m=+3356.697010497" Sep 30 20:42:36 crc kubenswrapper[4603]: I0930 20:42:36.829077 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 
20:42:36 crc kubenswrapper[4603]: I0930 20:42:36.829487 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:37 crc kubenswrapper[4603]: I0930 20:42:37.877824 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mmz7v" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" probeResult="failure" output=< Sep 30 20:42:37 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:42:37 crc kubenswrapper[4603]: > Sep 30 20:42:38 crc kubenswrapper[4603]: I0930 20:42:38.441509 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:42:38 crc kubenswrapper[4603]: I0930 20:42:38.441575 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:42:47 crc kubenswrapper[4603]: I0930 20:42:47.875922 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mmz7v" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" probeResult="failure" output=< Sep 30 20:42:47 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:42:47 crc kubenswrapper[4603]: > Sep 30 20:42:56 crc kubenswrapper[4603]: I0930 20:42:56.893312 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:56 crc kubenswrapper[4603]: I0930 20:42:56.971733 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:57 crc kubenswrapper[4603]: I0930 20:42:57.706930 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:42:57 crc kubenswrapper[4603]: I0930 20:42:57.949498 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mmz7v" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" containerID="cri-o://9deb7ac72232b2afbfac9fbd37d64d3d8ba075f453de36657e8d405ba00f5f51" gracePeriod=2 Sep 30 20:42:58 crc kubenswrapper[4603]: I0930 20:42:58.962434 4603 generic.go:334] "Generic (PLEG): container finished" podID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerID="9deb7ac72232b2afbfac9fbd37d64d3d8ba075f453de36657e8d405ba00f5f51" exitCode=0 Sep 30 20:42:58 crc kubenswrapper[4603]: I0930 20:42:58.962715 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerDied","Data":"9deb7ac72232b2afbfac9fbd37d64d3d8ba075f453de36657e8d405ba00f5f51"} Sep 30 20:42:58 crc kubenswrapper[4603]: I0930 20:42:58.962743 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mmz7v" 
event={"ID":"2f99906c-cbd5-4148-8bee-0b51fa71a8a4","Type":"ContainerDied","Data":"927157b707610c654706195aee1a8ac7db52559708f566c9651d0253e74c4db6"} Sep 30 20:42:58 crc kubenswrapper[4603]: I0930 20:42:58.962756 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="927157b707610c654706195aee1a8ac7db52559708f566c9651d0253e74c4db6" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.032745 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.176484 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities\") pod \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.176586 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content\") pod \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.176655 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98tmt\" (UniqueName: \"kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt\") pod \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\" (UID: \"2f99906c-cbd5-4148-8bee-0b51fa71a8a4\") " Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.178285 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities" (OuterVolumeSpecName: "utilities") pod "2f99906c-cbd5-4148-8bee-0b51fa71a8a4" (UID: "2f99906c-cbd5-4148-8bee-0b51fa71a8a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.198205 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt" (OuterVolumeSpecName: "kube-api-access-98tmt") pod "2f99906c-cbd5-4148-8bee-0b51fa71a8a4" (UID: "2f99906c-cbd5-4148-8bee-0b51fa71a8a4"). InnerVolumeSpecName "kube-api-access-98tmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.261862 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f99906c-cbd5-4148-8bee-0b51fa71a8a4" (UID: "2f99906c-cbd5-4148-8bee-0b51fa71a8a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.279435 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98tmt\" (UniqueName: \"kubernetes.io/projected/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-kube-api-access-98tmt\") on node \"crc\" DevicePath \"\"" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.279476 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.279488 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f99906c-cbd5-4148-8bee-0b51fa71a8a4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:42:59 crc kubenswrapper[4603]: I0930 20:42:59.972022 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mmz7v" Sep 30 20:43:00 crc kubenswrapper[4603]: I0930 20:43:00.008386 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:43:00 crc kubenswrapper[4603]: I0930 20:43:00.018254 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mmz7v"] Sep 30 20:43:00 crc kubenswrapper[4603]: I0930 20:43:00.775315 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" path="/var/lib/kubelet/pods/2f99906c-cbd5-4148-8bee-0b51fa71a8a4/volumes" Sep 30 20:43:08 crc kubenswrapper[4603]: I0930 20:43:08.441607 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:43:08 crc kubenswrapper[4603]: I0930 20:43:08.442102 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:43:08 crc kubenswrapper[4603]: I0930 20:43:08.442138 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:43:08 crc kubenswrapper[4603]: I0930 20:43:08.442803 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:43:08 crc kubenswrapper[4603]: I0930 20:43:08.442855 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb" gracePeriod=600 Sep 30 20:43:09 crc kubenswrapper[4603]: I0930 20:43:09.056346 4603 generic.go:334] "Generic (PLEG): container finished" 
podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb" exitCode=0 Sep 30 20:43:09 crc kubenswrapper[4603]: I0930 20:43:09.056434 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb"} Sep 30 20:43:09 crc kubenswrapper[4603]: I0930 20:43:09.056875 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762"} Sep 30 20:43:09 crc kubenswrapper[4603]: I0930 20:43:09.056919 4603 scope.go:117] "RemoveContainer" containerID="89a2d032ee7d6c50e5e69c593123ef8c6f0c3a0bbaa23d495b068a380bae03fe" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.628406 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:43:56 crc kubenswrapper[4603]: E0930 20:43:56.629361 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="extract-utilities" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.629376 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="extract-utilities" Sep 30 20:43:56 crc kubenswrapper[4603]: E0930 20:43:56.629398 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.629404 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" Sep 30 20:43:56 crc kubenswrapper[4603]: E0930 20:43:56.629416 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="extract-content" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.629422 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="extract-content" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.629632 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f99906c-cbd5-4148-8bee-0b51fa71a8a4" containerName="registry-server" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.630846 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.643936 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.707173 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.707246 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.707436 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g42tw\" (UniqueName: \"kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.812160 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g42tw\" (UniqueName: \"kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.812639 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.812759 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.813211 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.813282 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.837363 4603 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g42tw\" (UniqueName: \"kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw\") pod \"certified-operators-jvb9q\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:56 crc kubenswrapper[4603]: I0930 20:43:56.958845 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:43:57 crc kubenswrapper[4603]: I0930 20:43:57.584508 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:43:58 crc kubenswrapper[4603]: I0930 20:43:58.506556 4603 generic.go:334] "Generic (PLEG): container finished" podID="72c511f4-c601-4269-9d74-815f24b010e5" containerID="514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b" exitCode=0 Sep 30 20:43:58 crc kubenswrapper[4603]: I0930 20:43:58.506661 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerDied","Data":"514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b"} Sep 30 20:43:58 crc kubenswrapper[4603]: I0930 20:43:58.506921 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerStarted","Data":"dd14a56012e3563617e47f00dbb5f37b213b1eeba328a79d76dc5c648c229c38"} Sep 30 20:44:00 crc kubenswrapper[4603]: I0930 20:44:00.528686 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerStarted","Data":"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866"} Sep 30 20:44:01 crc kubenswrapper[4603]: I0930 20:44:01.538110 4603 generic.go:334] "Generic (PLEG): container finished" podID="72c511f4-c601-4269-9d74-815f24b010e5" containerID="23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866" exitCode=0 Sep 30 20:44:01 crc kubenswrapper[4603]: I0930 20:44:01.538144 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerDied","Data":"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866"} Sep 30 20:44:02 crc kubenswrapper[4603]: I0930 20:44:02.555181 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerStarted","Data":"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc"} Sep 30 20:44:02 crc kubenswrapper[4603]: I0930 20:44:02.579184 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jvb9q" podStartSLOduration=3.112316935 podStartE2EDuration="6.579143453s" podCreationTimestamp="2025-09-30 20:43:56 +0000 UTC" firstStartedPulling="2025-09-30 20:43:58.510134704 +0000 UTC m=+3440.448593532" lastFinishedPulling="2025-09-30 20:44:01.976961232 +0000 UTC m=+3443.915420050" observedRunningTime="2025-09-30 20:44:02.571270115 +0000 UTC m=+3444.509728933" watchObservedRunningTime="2025-09-30 20:44:02.579143453 +0000 UTC m=+3444.517602271" Sep 30 20:44:06 crc kubenswrapper[4603]: I0930 20:44:06.960043 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:06 crc kubenswrapper[4603]: I0930 20:44:06.960599 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:07 crc kubenswrapper[4603]: I0930 20:44:07.005977 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:07 crc kubenswrapper[4603]: I0930 20:44:07.655124 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:07 crc kubenswrapper[4603]: I0930 20:44:07.702905 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:44:09 crc kubenswrapper[4603]: I0930 20:44:09.607706 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jvb9q" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="registry-server" containerID="cri-o://28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc" gracePeriod=2 Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.408174 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.511512 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities\") pod \"72c511f4-c601-4269-9d74-815f24b010e5\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.512916 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities" (OuterVolumeSpecName: "utilities") pod "72c511f4-c601-4269-9d74-815f24b010e5" (UID: "72c511f4-c601-4269-9d74-815f24b010e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.513945 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.615072 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g42tw\" (UniqueName: \"kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw\") pod \"72c511f4-c601-4269-9d74-815f24b010e5\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.615122 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content\") pod \"72c511f4-c601-4269-9d74-815f24b010e5\" (UID: \"72c511f4-c601-4269-9d74-815f24b010e5\") " Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.619305 4603 generic.go:334] "Generic (PLEG): container finished" podID="72c511f4-c601-4269-9d74-815f24b010e5" containerID="28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc" exitCode=0 Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.619374 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jvb9q" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.619410 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerDied","Data":"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc"} Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.619681 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jvb9q" event={"ID":"72c511f4-c601-4269-9d74-815f24b010e5","Type":"ContainerDied","Data":"dd14a56012e3563617e47f00dbb5f37b213b1eeba328a79d76dc5c648c229c38"} Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.619706 4603 scope.go:117] "RemoveContainer" containerID="28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.622051 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw" (OuterVolumeSpecName: "kube-api-access-g42tw") pod "72c511f4-c601-4269-9d74-815f24b010e5" (UID: "72c511f4-c601-4269-9d74-815f24b010e5"). InnerVolumeSpecName "kube-api-access-g42tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.668128 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72c511f4-c601-4269-9d74-815f24b010e5" (UID: "72c511f4-c601-4269-9d74-815f24b010e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.709892 4603 scope.go:117] "RemoveContainer" containerID="23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.717329 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g42tw\" (UniqueName: \"kubernetes.io/projected/72c511f4-c601-4269-9d74-815f24b010e5-kube-api-access-g42tw\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.717364 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72c511f4-c601-4269-9d74-815f24b010e5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.760815 4603 scope.go:117] "RemoveContainer" containerID="514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.801888 4603 scope.go:117] "RemoveContainer" containerID="28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc" Sep 30 20:44:10 crc kubenswrapper[4603]: E0930 20:44:10.802273 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc\": container with ID starting with 28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc not found: ID does not exist" containerID="28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.802307 4603 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc"} err="failed to get container status \"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc\": rpc error: code = NotFound desc = could not find container \"28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc\": container with ID starting with 28fb5f911ac4957b024faf6d0d5c4ac5241d76dda657cc3134bf36f5c5ee15bc not found: ID does not exist" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.802364 4603 scope.go:117] "RemoveContainer" containerID="23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866" Sep 30 20:44:10 crc kubenswrapper[4603]: E0930 20:44:10.802574 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866\": container with ID starting with 23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866 not found: ID does not exist" containerID="23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.802604 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866"} err="failed to get container status \"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866\": rpc error: code = NotFound desc = could not find container \"23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866\": container with ID starting with 23e31ae9b852552374ed350f0b655ac1e2cb4b0b1b3fee78dee3ea924382a866 not found: ID does not exist" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.802620 4603 scope.go:117] "RemoveContainer" containerID="514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b" Sep 30 20:44:10 crc kubenswrapper[4603]: E0930 20:44:10.802996 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b\": container with ID starting with 514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b not found: ID does not exist" containerID="514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.803021 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b"} err="failed to get container status \"514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b\": rpc error: code = NotFound desc = could not find container \"514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b\": container with ID starting with 514a5ac0c76d9dbe0402b42cdc77f77db69d40aada5c91ab8005a6d0ffc3773b not found: ID does not exist" Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.942145 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:44:10 crc kubenswrapper[4603]: I0930 20:44:10.951065 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jvb9q"] Sep 30 20:44:12 crc kubenswrapper[4603]: I0930 20:44:12.774891 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72c511f4-c601-4269-9d74-815f24b010e5" path="/var/lib/kubelet/pods/72c511f4-c601-4269-9d74-815f24b010e5/volumes" Sep 30 20:44:26 crc 
kubenswrapper[4603]: I0930 20:44:26.088469 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:26 crc kubenswrapper[4603]: E0930 20:44:26.089304 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="extract-content" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.089316 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="extract-content" Sep 30 20:44:26 crc kubenswrapper[4603]: E0930 20:44:26.089337 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="registry-server" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.089344 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="registry-server" Sep 30 20:44:26 crc kubenswrapper[4603]: E0930 20:44:26.089450 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="extract-utilities" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.089459 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="extract-utilities" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.089634 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="72c511f4-c601-4269-9d74-815f24b010e5" containerName="registry-server" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.090979 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.102867 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.237932 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2kp8\" (UniqueName: \"kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.238265 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.238315 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.340100 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kp8\" (UniqueName: \"kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " 
pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.340284 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.340311 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.340854 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.341025 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.359963 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kp8\" (UniqueName: \"kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8\") pod \"redhat-marketplace-qtptc\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.426268 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:26 crc kubenswrapper[4603]: I0930 20:44:26.932047 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:27 crc kubenswrapper[4603]: I0930 20:44:27.783797 4603 generic.go:334] "Generic (PLEG): container finished" podID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerID="1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a" exitCode=0 Sep 30 20:44:27 crc kubenswrapper[4603]: I0930 20:44:27.783896 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerDied","Data":"1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a"} Sep 30 20:44:27 crc kubenswrapper[4603]: I0930 20:44:27.784104 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerStarted","Data":"de500b53f84b91d2775c24580b055ebd04fe8a06ca3c8b96a1c1cbec33cc2fa3"} Sep 30 20:44:27 crc kubenswrapper[4603]: I0930 20:44:27.791666 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:44:28 crc kubenswrapper[4603]: I0930 20:44:28.795643 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerStarted","Data":"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439"} Sep 30 20:44:29 crc kubenswrapper[4603]: I0930 20:44:29.810787 4603 generic.go:334] "Generic (PLEG): container finished" podID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerID="0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439" exitCode=0 Sep 30 20:44:29 crc kubenswrapper[4603]: I0930 20:44:29.811037 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerDied","Data":"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439"} Sep 30 20:44:31 crc kubenswrapper[4603]: I0930 20:44:31.833904 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerStarted","Data":"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574"} Sep 30 20:44:31 crc kubenswrapper[4603]: I0930 20:44:31.862148 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qtptc" podStartSLOduration=2.813788394 podStartE2EDuration="5.862130449s" podCreationTimestamp="2025-09-30 20:44:26 +0000 UTC" firstStartedPulling="2025-09-30 20:44:27.791378392 +0000 UTC m=+3469.729837220" lastFinishedPulling="2025-09-30 20:44:30.839720457 +0000 UTC m=+3472.778179275" observedRunningTime="2025-09-30 20:44:31.853329146 +0000 UTC m=+3473.791787974" watchObservedRunningTime="2025-09-30 20:44:31.862130449 +0000 UTC m=+3473.800589257" Sep 30 20:44:36 crc kubenswrapper[4603]: I0930 20:44:36.426426 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:36 crc kubenswrapper[4603]: I0930 20:44:36.426978 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 
20:44:36 crc kubenswrapper[4603]: I0930 20:44:36.484120 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:36 crc kubenswrapper[4603]: I0930 20:44:36.932146 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:37 crc kubenswrapper[4603]: I0930 20:44:37.000868 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:38 crc kubenswrapper[4603]: I0930 20:44:38.894757 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qtptc" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="registry-server" containerID="cri-o://62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574" gracePeriod=2 Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.516354 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.683033 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2kp8\" (UniqueName: \"kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8\") pod \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.683457 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content\") pod \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.683531 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities\") pod \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\" (UID: \"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301\") " Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.684333 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities" (OuterVolumeSpecName: "utilities") pod "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" (UID: "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.685028 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.697118 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8" (OuterVolumeSpecName: "kube-api-access-s2kp8") pod "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" (UID: "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301"). InnerVolumeSpecName "kube-api-access-s2kp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.700227 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" (UID: "4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.786876 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2kp8\" (UniqueName: \"kubernetes.io/projected/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-kube-api-access-s2kp8\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.786928 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.903972 4603 generic.go:334] "Generic (PLEG): container finished" podID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerID="62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574" exitCode=0 Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.904029 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtptc" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.904031 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerDied","Data":"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574"} Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.904081 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtptc" event={"ID":"4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301","Type":"ContainerDied","Data":"de500b53f84b91d2775c24580b055ebd04fe8a06ca3c8b96a1c1cbec33cc2fa3"} Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.904100 4603 scope.go:117] "RemoveContainer" containerID="62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.945458 4603 scope.go:117] "RemoveContainer" containerID="0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439" Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.945762 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.957732 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtptc"] Sep 30 20:44:39 crc kubenswrapper[4603]: I0930 20:44:39.971724 4603 scope.go:117] "RemoveContainer" containerID="1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.014549 4603 scope.go:117] "RemoveContainer" containerID="62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574" Sep 30 20:44:40 crc kubenswrapper[4603]: E0930 20:44:40.015010 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574\": container with ID starting with 
62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574 not found: ID does not exist" containerID="62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.015052 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574"} err="failed to get container status \"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574\": rpc error: code = NotFound desc = could not find container \"62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574\": container with ID starting with 62f870f6bf5bf93f7e0920b8897aeeddc73eae7a7e6c353ddd1d41ac7fa39574 not found: ID does not exist" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.015078 4603 scope.go:117] "RemoveContainer" containerID="0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439" Sep 30 20:44:40 crc kubenswrapper[4603]: E0930 20:44:40.016078 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439\": container with ID starting with 0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439 not found: ID does not exist" containerID="0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.016188 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439"} err="failed to get container status \"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439\": rpc error: code = NotFound desc = could not find container \"0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439\": container with ID starting with 0b81e1573d18b03e7cb1023bc2faaf17a93c4e2c4d71c6f9fad5c183996fb439 not found: ID does not exist" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.016272 4603 scope.go:117] "RemoveContainer" containerID="1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a" Sep 30 20:44:40 crc kubenswrapper[4603]: E0930 20:44:40.016690 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a\": container with ID starting with 1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a not found: ID does not exist" containerID="1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.016724 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a"} err="failed to get container status \"1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a\": rpc error: code = NotFound desc = could not find container \"1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a\": container with ID starting with 1ef2a26ab56ee0afd0285e012ff0d2767d6606ddb4627cc53c44589dac494e3a not found: ID does not exist" Sep 30 20:44:40 crc kubenswrapper[4603]: I0930 20:44:40.776357 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" path="/var/lib/kubelet/pods/4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301/volumes" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.190882 
4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq"] Sep 30 20:45:00 crc kubenswrapper[4603]: E0930 20:45:00.193039 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="extract-content" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.193155 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="extract-content" Sep 30 20:45:00 crc kubenswrapper[4603]: E0930 20:45:00.193274 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.193346 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4603]: E0930 20:45:00.193464 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="extract-utilities" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.193542 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="extract-utilities" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.193887 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a8f11c7-f5ec-4ce2-9e91-2d1314b5a301" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.194784 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.203359 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq"] Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.206445 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.207071 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.253231 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.253544 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.253575 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x68xp\" (UniqueName: \"kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp\") pod \"collect-profiles-29321085-r8hgq\" (UID: 
\"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.358398 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.358471 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.358504 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x68xp\" (UniqueName: \"kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.360055 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.366904 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.389336 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x68xp\" (UniqueName: \"kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp\") pod \"collect-profiles-29321085-r8hgq\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:00 crc kubenswrapper[4603]: I0930 20:45:00.521712 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:01 crc kubenswrapper[4603]: I0930 20:45:01.063779 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq"] Sep 30 20:45:01 crc kubenswrapper[4603]: I0930 20:45:01.103675 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" event={"ID":"fe9adadf-ec66-45bc-be46-42a097b8d626","Type":"ContainerStarted","Data":"bf264ec200b9f30fbc4ada5074148d2740c770d9761477e8206ec8d99d3ec0ee"} Sep 30 20:45:02 crc kubenswrapper[4603]: I0930 20:45:02.113090 4603 generic.go:334] "Generic (PLEG): container finished" podID="fe9adadf-ec66-45bc-be46-42a097b8d626" containerID="90ddeaf64083e25c962fe30bf828b2a4ddda21b3bf752d7a5d9c3a417e7558ea" exitCode=0 Sep 30 20:45:02 crc kubenswrapper[4603]: I0930 20:45:02.113297 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" event={"ID":"fe9adadf-ec66-45bc-be46-42a097b8d626","Type":"ContainerDied","Data":"90ddeaf64083e25c962fe30bf828b2a4ddda21b3bf752d7a5d9c3a417e7558ea"} Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.638833 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.722774 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume\") pod \"fe9adadf-ec66-45bc-be46-42a097b8d626\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.722841 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x68xp\" (UniqueName: \"kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp\") pod \"fe9adadf-ec66-45bc-be46-42a097b8d626\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.722871 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume\") pod \"fe9adadf-ec66-45bc-be46-42a097b8d626\" (UID: \"fe9adadf-ec66-45bc-be46-42a097b8d626\") " Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.723981 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume" (OuterVolumeSpecName: "config-volume") pod "fe9adadf-ec66-45bc-be46-42a097b8d626" (UID: "fe9adadf-ec66-45bc-be46-42a097b8d626"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.730890 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fe9adadf-ec66-45bc-be46-42a097b8d626" (UID: "fe9adadf-ec66-45bc-be46-42a097b8d626"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.731347 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp" (OuterVolumeSpecName: "kube-api-access-x68xp") pod "fe9adadf-ec66-45bc-be46-42a097b8d626" (UID: "fe9adadf-ec66-45bc-be46-42a097b8d626"). InnerVolumeSpecName "kube-api-access-x68xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.825881 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe9adadf-ec66-45bc-be46-42a097b8d626-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.825913 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe9adadf-ec66-45bc-be46-42a097b8d626-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4603]: I0930 20:45:03.825922 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x68xp\" (UniqueName: \"kubernetes.io/projected/fe9adadf-ec66-45bc-be46-42a097b8d626-kube-api-access-x68xp\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.137204 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" event={"ID":"fe9adadf-ec66-45bc-be46-42a097b8d626","Type":"ContainerDied","Data":"bf264ec200b9f30fbc4ada5074148d2740c770d9761477e8206ec8d99d3ec0ee"} Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.137556 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf264ec200b9f30fbc4ada5074148d2740c770d9761477e8206ec8d99d3ec0ee" Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.137237 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r8hgq" Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.748319 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c"] Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.759051 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321040-5m55c"] Sep 30 20:45:04 crc kubenswrapper[4603]: I0930 20:45:04.776648 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17759eb-d1ee-4cc6-b354-f3914a671cb0" path="/var/lib/kubelet/pods/e17759eb-d1ee-4cc6-b354-f3914a671cb0/volumes" Sep 30 20:45:08 crc kubenswrapper[4603]: I0930 20:45:08.441645 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:45:08 crc kubenswrapper[4603]: I0930 20:45:08.442212 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:45:38 crc kubenswrapper[4603]: I0930 20:45:38.442855 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:45:38 crc kubenswrapper[4603]: I0930 20:45:38.443415 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:45:46 crc kubenswrapper[4603]: I0930 20:45:46.910126 4603 scope.go:117] "RemoveContainer" containerID="94d7a656def7f25ec824e3e6ef803c5741c55eb2e4bc3bdcdeb0423c7fa77d0d" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.441626 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.442156 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.442221 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.442887 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.442946 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" gracePeriod=600 Sep 30 20:46:08 crc kubenswrapper[4603]: E0930 20:46:08.562829 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.743645 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" exitCode=0 Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.743689 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762"} Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.743721 4603 scope.go:117] "RemoveContainer" containerID="bca21ffa9b54bcfd9d9e208ccc6ff10eb319d4b9776172e243f29a03aa0c79cb" Sep 30 20:46:08 crc kubenswrapper[4603]: I0930 20:46:08.744501 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:46:08 crc kubenswrapper[4603]: E0930 20:46:08.744895 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:46:22 crc kubenswrapper[4603]: I0930 20:46:22.764812 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:46:22 crc kubenswrapper[4603]: E0930 20:46:22.765726 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:46:33 crc kubenswrapper[4603]: I0930 20:46:33.764342 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:46:33 crc kubenswrapper[4603]: E0930 20:46:33.765252 4603 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:46:45 crc kubenswrapper[4603]: I0930 20:46:45.764949 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:46:45 crc kubenswrapper[4603]: E0930 20:46:45.765771 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:46:59 crc kubenswrapper[4603]: I0930 20:46:59.764541 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:46:59 crc kubenswrapper[4603]: E0930 20:46:59.766541 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:47:14 crc kubenswrapper[4603]: I0930 20:47:14.764732 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:47:14 crc kubenswrapper[4603]: E0930 20:47:14.765575 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:47:29 crc kubenswrapper[4603]: I0930 20:47:29.764398 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:47:29 crc kubenswrapper[4603]: E0930 20:47:29.765114 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:47:41 crc kubenswrapper[4603]: I0930 20:47:41.766097 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:47:41 crc kubenswrapper[4603]: E0930 20:47:41.767347 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:47:55 crc kubenswrapper[4603]: I0930 20:47:55.765431 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:47:55 crc kubenswrapper[4603]: E0930 20:47:55.766707 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:48:09 crc kubenswrapper[4603]: I0930 20:48:09.764431 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:48:09 crc kubenswrapper[4603]: E0930 20:48:09.765480 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:48:21 crc kubenswrapper[4603]: I0930 20:48:21.763787 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:48:21 crc kubenswrapper[4603]: E0930 20:48:21.764508 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:48:36 crc kubenswrapper[4603]: I0930 20:48:36.764254 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:48:36 crc kubenswrapper[4603]: E0930 20:48:36.765191 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:48:47 crc kubenswrapper[4603]: I0930 20:48:47.039516 4603 scope.go:117] "RemoveContainer" containerID="ffa8f0a446617330badf89af26dd2b3aa0ad97979e4c3a35d0ce2d431d88e65f" Sep 30 20:48:47 crc kubenswrapper[4603]: I0930 20:48:47.066002 4603 scope.go:117] "RemoveContainer" containerID="9deb7ac72232b2afbfac9fbd37d64d3d8ba075f453de36657e8d405ba00f5f51" Sep 30 20:48:47 crc kubenswrapper[4603]: I0930 20:48:47.138305 4603 scope.go:117] "RemoveContainer" containerID="b7b00a03c659ffadd653f04d325e20c3e3d92c7d6ca541ce9cb28852571508d0" Sep 30 20:48:49 crc kubenswrapper[4603]: I0930 20:48:49.764649 4603 scope.go:117] "RemoveContainer" 
containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:48:49 crc kubenswrapper[4603]: E0930 20:48:49.765392 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:49:01 crc kubenswrapper[4603]: I0930 20:49:01.763942 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:49:01 crc kubenswrapper[4603]: E0930 20:49:01.764790 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:49:16 crc kubenswrapper[4603]: I0930 20:49:16.764574 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:49:16 crc kubenswrapper[4603]: E0930 20:49:16.765461 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:49:29 crc kubenswrapper[4603]: I0930 20:49:29.764698 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:49:29 crc kubenswrapper[4603]: E0930 20:49:29.765422 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:49:44 crc kubenswrapper[4603]: I0930 20:49:44.765327 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:49:44 crc kubenswrapper[4603]: E0930 20:49:44.766025 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:49:56 crc kubenswrapper[4603]: I0930 20:49:56.765256 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:49:56 crc kubenswrapper[4603]: E0930 20:49:56.766379 4603 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:50:09 crc kubenswrapper[4603]: I0930 20:50:09.765102 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:50:09 crc kubenswrapper[4603]: E0930 20:50:09.765959 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:50:20 crc kubenswrapper[4603]: I0930 20:50:20.764804 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:50:20 crc kubenswrapper[4603]: E0930 20:50:20.766790 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:50:35 crc kubenswrapper[4603]: I0930 20:50:35.764920 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:50:35 crc kubenswrapper[4603]: E0930 20:50:35.765827 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:50:50 crc kubenswrapper[4603]: I0930 20:50:50.764474 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:50:50 crc kubenswrapper[4603]: E0930 20:50:50.765626 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:51:02 crc kubenswrapper[4603]: I0930 20:51:02.764524 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:51:02 crc kubenswrapper[4603]: E0930 20:51:02.765556 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:51:14 crc kubenswrapper[4603]: I0930 20:51:14.765053 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:51:15 crc kubenswrapper[4603]: I0930 20:51:15.600331 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89"} Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.378308 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:53:36 crc kubenswrapper[4603]: E0930 20:53:36.379231 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9adadf-ec66-45bc-be46-42a097b8d626" containerName="collect-profiles" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.379244 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9adadf-ec66-45bc-be46-42a097b8d626" containerName="collect-profiles" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.379439 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe9adadf-ec66-45bc-be46-42a097b8d626" containerName="collect-profiles" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.380863 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.430998 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.483079 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.483421 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4gw9\" (UniqueName: \"kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.483577 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.585449 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.585566 
4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.585659 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4gw9\" (UniqueName: \"kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.587683 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.588454 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:36 crc kubenswrapper[4603]: I0930 20:53:36.844384 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4gw9\" (UniqueName: \"kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9\") pod \"redhat-operators-tzrrq\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.002462 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.484622 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.861849 4603 generic.go:334] "Generic (PLEG): container finished" podID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerID="3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3" exitCode=0 Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.862000 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerDied","Data":"3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3"} Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.862152 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerStarted","Data":"788d3d1415a8c429fc2f6a117c7f253c91e9ae8e2728fef5b73f34652ae9ae59"} Sep 30 20:53:37 crc kubenswrapper[4603]: I0930 20:53:37.863900 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:53:38 crc kubenswrapper[4603]: I0930 20:53:38.457039 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:53:38 crc kubenswrapper[4603]: I0930 20:53:38.457110 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:53:39 crc kubenswrapper[4603]: I0930 20:53:39.888228 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerStarted","Data":"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc"} Sep 30 20:53:43 crc kubenswrapper[4603]: I0930 20:53:43.922582 4603 generic.go:334] "Generic (PLEG): container finished" podID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerID="0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc" exitCode=0 Sep 30 20:53:43 crc kubenswrapper[4603]: I0930 20:53:43.922679 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerDied","Data":"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc"} Sep 30 20:53:44 crc kubenswrapper[4603]: I0930 20:53:44.932404 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerStarted","Data":"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344"} Sep 30 20:53:44 crc kubenswrapper[4603]: I0930 20:53:44.959247 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tzrrq" podStartSLOduration=2.428311331 podStartE2EDuration="8.959223931s" podCreationTimestamp="2025-09-30 
20:53:36 +0000 UTC" firstStartedPulling="2025-09-30 20:53:37.863659147 +0000 UTC m=+4019.802117955" lastFinishedPulling="2025-09-30 20:53:44.394571737 +0000 UTC m=+4026.333030555" observedRunningTime="2025-09-30 20:53:44.950964992 +0000 UTC m=+4026.889423810" watchObservedRunningTime="2025-09-30 20:53:44.959223931 +0000 UTC m=+4026.897682759" Sep 30 20:53:47 crc kubenswrapper[4603]: I0930 20:53:47.003705 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:47 crc kubenswrapper[4603]: I0930 20:53:47.003999 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:48 crc kubenswrapper[4603]: I0930 20:53:48.090746 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tzrrq" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="registry-server" probeResult="failure" output=< Sep 30 20:53:48 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 20:53:48 crc kubenswrapper[4603]: > Sep 30 20:53:57 crc kubenswrapper[4603]: I0930 20:53:57.400856 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:57 crc kubenswrapper[4603]: I0930 20:53:57.467879 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:57 crc kubenswrapper[4603]: I0930 20:53:57.643689 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.054354 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tzrrq" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="registry-server" containerID="cri-o://ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344" gracePeriod=2 Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.814523 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.938872 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities\") pod \"cd79136d-8adb-455e-864d-81d011ca7b4c\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.939259 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content\") pod \"cd79136d-8adb-455e-864d-81d011ca7b4c\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.939297 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4gw9\" (UniqueName: \"kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9\") pod \"cd79136d-8adb-455e-864d-81d011ca7b4c\" (UID: \"cd79136d-8adb-455e-864d-81d011ca7b4c\") " Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.940728 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities" (OuterVolumeSpecName: "utilities") pod "cd79136d-8adb-455e-864d-81d011ca7b4c" (UID: "cd79136d-8adb-455e-864d-81d011ca7b4c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:53:59 crc kubenswrapper[4603]: I0930 20:53:59.948463 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9" (OuterVolumeSpecName: "kube-api-access-v4gw9") pod "cd79136d-8adb-455e-864d-81d011ca7b4c" (UID: "cd79136d-8adb-455e-864d-81d011ca7b4c"). InnerVolumeSpecName "kube-api-access-v4gw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.030763 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd79136d-8adb-455e-864d-81d011ca7b4c" (UID: "cd79136d-8adb-455e-864d-81d011ca7b4c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.040948 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.040972 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd79136d-8adb-455e-864d-81d011ca7b4c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.040985 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4gw9\" (UniqueName: \"kubernetes.io/projected/cd79136d-8adb-455e-864d-81d011ca7b4c-kube-api-access-v4gw9\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.068619 4603 generic.go:334] "Generic (PLEG): container finished" podID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerID="ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344" exitCode=0 Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.068708 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerDied","Data":"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344"} Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.068714 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tzrrq" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.070364 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tzrrq" event={"ID":"cd79136d-8adb-455e-864d-81d011ca7b4c","Type":"ContainerDied","Data":"788d3d1415a8c429fc2f6a117c7f253c91e9ae8e2728fef5b73f34652ae9ae59"} Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.070406 4603 scope.go:117] "RemoveContainer" containerID="ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.116459 4603 scope.go:117] "RemoveContainer" containerID="0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.120543 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.127840 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tzrrq"] Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.148596 4603 scope.go:117] "RemoveContainer" containerID="3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.202771 4603 scope.go:117] "RemoveContainer" containerID="ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344" Sep 30 20:54:00 crc kubenswrapper[4603]: E0930 20:54:00.203075 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344\": container with ID starting with ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344 not found: ID does not exist" containerID="ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.203117 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344"} err="failed to get container status \"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344\": rpc error: code = NotFound desc = could not find container \"ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344\": container with ID starting with ceabf914867e45741b3aeaa6ae35c0e8b0dcd58ff46dad84b3031c5d438b9344 not found: ID does not exist" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.203144 4603 scope.go:117] "RemoveContainer" containerID="0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc" Sep 30 20:54:00 crc kubenswrapper[4603]: E0930 20:54:00.203441 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc\": container with ID starting with 0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc not found: ID does not exist" containerID="0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.203463 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc"} err="failed to get container status \"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc\": rpc error: code = NotFound desc = could not find container \"0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc\": container with ID starting with 0235322756d1cf710ce07dd4dfebfa67b0b17cffc2a5718f2818c0f04200d5fc not found: ID does not exist" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.203485 4603 scope.go:117] "RemoveContainer" containerID="3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3" Sep 30 20:54:00 crc kubenswrapper[4603]: E0930 20:54:00.203825 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3\": container with ID starting with 3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3 not found: ID does not exist" containerID="3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.203839 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3"} err="failed to get container status \"3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3\": rpc error: code = NotFound desc = could not find container \"3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3\": container with ID starting with 3b6690e1017d59fcb46edfbeab100af9248eb4eb7ca7a9ffcba53fa160560bd3 not found: ID does not exist" Sep 30 20:54:00 crc kubenswrapper[4603]: I0930 20:54:00.776803 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" path="/var/lib/kubelet/pods/cd79136d-8adb-455e-864d-81d011ca7b4c/volumes" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.911465 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:02 crc kubenswrapper[4603]: E0930 20:54:02.913440 4603 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="extract-utilities" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.913541 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="extract-utilities" Sep 30 20:54:02 crc kubenswrapper[4603]: E0930 20:54:02.913622 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="extract-content" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.913686 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="extract-content" Sep 30 20:54:02 crc kubenswrapper[4603]: E0930 20:54:02.913800 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="registry-server" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.913862 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="registry-server" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.914197 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd79136d-8adb-455e-864d-81d011ca7b4c" containerName="registry-server" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.916036 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.932411 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.995667 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8l9d\" (UniqueName: \"kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.995726 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:02 crc kubenswrapper[4603]: I0930 20:54:02.995787 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.098133 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8l9d\" (UniqueName: \"kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.098221 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities\") pod 
\"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.098306 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.098736 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.098820 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.133148 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8l9d\" (UniqueName: \"kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d\") pod \"community-operators-xk6w8\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.277113 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:03 crc kubenswrapper[4603]: I0930 20:54:03.932700 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:04 crc kubenswrapper[4603]: I0930 20:54:04.108770 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerStarted","Data":"f4f41a3d35454fa01785b82956f2f789c147a74f7cf7add1622cda865c6c5b46"} Sep 30 20:54:05 crc kubenswrapper[4603]: I0930 20:54:05.117226 4603 generic.go:334] "Generic (PLEG): container finished" podID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerID="fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245" exitCode=0 Sep 30 20:54:05 crc kubenswrapper[4603]: I0930 20:54:05.117470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerDied","Data":"fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245"} Sep 30 20:54:06 crc kubenswrapper[4603]: I0930 20:54:06.131327 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerStarted","Data":"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596"} Sep 30 20:54:08 crc kubenswrapper[4603]: I0930 20:54:08.148507 4603 generic.go:334] "Generic (PLEG): container finished" podID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerID="29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596" exitCode=0 Sep 30 20:54:08 crc kubenswrapper[4603]: I0930 20:54:08.149044 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerDied","Data":"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596"} Sep 30 20:54:08 crc kubenswrapper[4603]: I0930 20:54:08.441934 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:54:08 crc kubenswrapper[4603]: I0930 20:54:08.442009 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:54:10 crc kubenswrapper[4603]: I0930 20:54:10.165849 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerStarted","Data":"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f"} Sep 30 20:54:10 crc kubenswrapper[4603]: I0930 20:54:10.183133 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xk6w8" podStartSLOduration=4.669545551 podStartE2EDuration="8.183117659s" podCreationTimestamp="2025-09-30 20:54:02 +0000 UTC" firstStartedPulling="2025-09-30 20:54:05.121453406 +0000 UTC m=+4047.059912224" lastFinishedPulling="2025-09-30 
20:54:08.635025514 +0000 UTC m=+4050.573484332" observedRunningTime="2025-09-30 20:54:10.180739283 +0000 UTC m=+4052.119198111" watchObservedRunningTime="2025-09-30 20:54:10.183117659 +0000 UTC m=+4052.121576477" Sep 30 20:54:13 crc kubenswrapper[4603]: I0930 20:54:13.278530 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:13 crc kubenswrapper[4603]: I0930 20:54:13.279045 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:13 crc kubenswrapper[4603]: I0930 20:54:13.428405 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:14 crc kubenswrapper[4603]: I0930 20:54:14.255096 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:14 crc kubenswrapper[4603]: I0930 20:54:14.311157 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:16 crc kubenswrapper[4603]: I0930 20:54:16.216298 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xk6w8" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="registry-server" containerID="cri-o://87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f" gracePeriod=2 Sep 30 20:54:16 crc kubenswrapper[4603]: I0930 20:54:16.972775 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.102266 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content\") pod \"fffb4ced-f54c-4e80-b32c-44f05605c053\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.102704 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities\") pod \"fffb4ced-f54c-4e80-b32c-44f05605c053\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.102772 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8l9d\" (UniqueName: \"kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d\") pod \"fffb4ced-f54c-4e80-b32c-44f05605c053\" (UID: \"fffb4ced-f54c-4e80-b32c-44f05605c053\") " Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.104091 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities" (OuterVolumeSpecName: "utilities") pod "fffb4ced-f54c-4e80-b32c-44f05605c053" (UID: "fffb4ced-f54c-4e80-b32c-44f05605c053"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.109353 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d" (OuterVolumeSpecName: "kube-api-access-t8l9d") pod "fffb4ced-f54c-4e80-b32c-44f05605c053" (UID: "fffb4ced-f54c-4e80-b32c-44f05605c053"). InnerVolumeSpecName "kube-api-access-t8l9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.204486 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8l9d\" (UniqueName: \"kubernetes.io/projected/fffb4ced-f54c-4e80-b32c-44f05605c053-kube-api-access-t8l9d\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.204518 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.228220 4603 generic.go:334] "Generic (PLEG): container finished" podID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerID="87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f" exitCode=0 Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.228312 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerDied","Data":"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f"} Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.229375 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xk6w8" event={"ID":"fffb4ced-f54c-4e80-b32c-44f05605c053","Type":"ContainerDied","Data":"f4f41a3d35454fa01785b82956f2f789c147a74f7cf7add1622cda865c6c5b46"} Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.228350 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xk6w8" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.229417 4603 scope.go:117] "RemoveContainer" containerID="87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.254435 4603 scope.go:117] "RemoveContainer" containerID="29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.278155 4603 scope.go:117] "RemoveContainer" containerID="fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.338829 4603 scope.go:117] "RemoveContainer" containerID="87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f" Sep 30 20:54:17 crc kubenswrapper[4603]: E0930 20:54:17.339490 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f\": container with ID starting with 87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f not found: ID does not exist" containerID="87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.339535 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f"} err="failed to get container status \"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f\": rpc error: code = NotFound desc = could not find container \"87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f\": container with ID starting with 87e8b489e7ab10efcc9573ce62e768cdfcbbb53ed9bcaa3279c5a1ead382070f not found: ID does not exist" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.339567 4603 scope.go:117] "RemoveContainer" containerID="29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596" Sep 30 20:54:17 crc kubenswrapper[4603]: E0930 20:54:17.340035 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596\": container with ID starting with 29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596 not found: ID does not exist" containerID="29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.340090 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596"} err="failed to get container status \"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596\": rpc error: code = NotFound desc = could not find container \"29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596\": container with ID starting with 29599127e5630b441c498042cb7d1181bb6f0bd46cfa9ac6bb17b00041019596 not found: ID does not exist" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.340129 4603 scope.go:117] "RemoveContainer" containerID="fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245" Sep 30 20:54:17 crc kubenswrapper[4603]: E0930 20:54:17.340511 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245\": container with ID starting 
with fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245 not found: ID does not exist" containerID="fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.340536 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245"} err="failed to get container status \"fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245\": rpc error: code = NotFound desc = could not find container \"fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245\": container with ID starting with fe3efa8a3d829fd685d0fadb83dd92d3070428d1859aadab06306f0a99462245 not found: ID does not exist" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.488618 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fffb4ced-f54c-4e80-b32c-44f05605c053" (UID: "fffb4ced-f54c-4e80-b32c-44f05605c053"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.511248 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fffb4ced-f54c-4e80-b32c-44f05605c053-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.570268 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:17 crc kubenswrapper[4603]: I0930 20:54:17.577050 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xk6w8"] Sep 30 20:54:18 crc kubenswrapper[4603]: I0930 20:54:18.780035 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" path="/var/lib/kubelet/pods/fffb4ced-f54c-4e80-b32c-44f05605c053/volumes" Sep 30 20:54:38 crc kubenswrapper[4603]: I0930 20:54:38.442001 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:54:38 crc kubenswrapper[4603]: I0930 20:54:38.442551 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:54:38 crc kubenswrapper[4603]: I0930 20:54:38.442588 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:54:38 crc kubenswrapper[4603]: I0930 20:54:38.443246 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:54:38 crc kubenswrapper[4603]: I0930 20:54:38.443289 4603 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89" gracePeriod=600 Sep 30 20:54:39 crc kubenswrapper[4603]: I0930 20:54:39.442314 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89" exitCode=0 Sep 30 20:54:39 crc kubenswrapper[4603]: I0930 20:54:39.442531 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89"} Sep 30 20:54:39 crc kubenswrapper[4603]: I0930 20:54:39.442979 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"} Sep 30 20:54:39 crc kubenswrapper[4603]: I0930 20:54:39.443015 4603 scope.go:117] "RemoveContainer" containerID="19c2edf905da60d0fcb99f43f3ab7deb271fbf6f1cffd8b3fb408e3d94969762" Sep 30 20:56:38 crc kubenswrapper[4603]: I0930 20:56:38.441796 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:56:38 crc kubenswrapper[4603]: I0930 20:56:38.442410 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:57:08 crc kubenswrapper[4603]: I0930 20:57:08.443352 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:57:08 crc kubenswrapper[4603]: I0930 20:57:08.444704 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:57:38 crc kubenswrapper[4603]: I0930 20:57:38.449324 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:57:38 crc kubenswrapper[4603]: I0930 20:57:38.449790 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:57:38 crc kubenswrapper[4603]: I0930 20:57:38.449829 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 20:57:38 crc kubenswrapper[4603]: I0930 20:57:38.450562 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:57:38 crc kubenswrapper[4603]: I0930 20:57:38.450610 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" gracePeriod=600 Sep 30 20:57:38 crc kubenswrapper[4603]: E0930 20:57:38.583423 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:57:39 crc kubenswrapper[4603]: I0930 20:57:39.157522 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" exitCode=0 Sep 30 20:57:39 crc kubenswrapper[4603]: I0930 20:57:39.157571 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"} Sep 30 20:57:39 crc kubenswrapper[4603]: I0930 20:57:39.157605 4603 scope.go:117] "RemoveContainer" containerID="2cec47052d8a04bfde0b665fab6dcfca1f5fe851fc980fcbc4d8b8526beccc89" Sep 30 20:57:39 crc kubenswrapper[4603]: I0930 20:57:39.158277 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:57:39 crc kubenswrapper[4603]: E0930 20:57:39.158575 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:57:53 crc kubenswrapper[4603]: I0930 20:57:53.763776 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:57:53 crc kubenswrapper[4603]: E0930 20:57:53.764636 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.046987 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:00 crc kubenswrapper[4603]: E0930 20:58:00.047906 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="extract-utilities" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.047923 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="extract-utilities" Sep 30 20:58:00 crc kubenswrapper[4603]: E0930 20:58:00.047959 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="extract-content" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.047968 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="extract-content" Sep 30 20:58:00 crc kubenswrapper[4603]: E0930 20:58:00.047991 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="registry-server" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.047999 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="registry-server" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.048246 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="fffb4ced-f54c-4e80-b32c-44f05605c053" containerName="registry-server" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.049912 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.061983 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.166883 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.166962 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.167057 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwmwt\" (UniqueName: \"kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.268811 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwmwt\" (UniqueName: \"kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.268958 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.269016 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.269548 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.269602 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.292057 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vwmwt\" (UniqueName: \"kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt\") pod \"redhat-marketplace-d2cq5\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.381430 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:00 crc kubenswrapper[4603]: I0930 20:58:00.817569 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:01 crc kubenswrapper[4603]: I0930 20:58:01.394239 4603 generic.go:334] "Generic (PLEG): container finished" podID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerID="aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64" exitCode=0 Sep 30 20:58:01 crc kubenswrapper[4603]: I0930 20:58:01.394280 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerDied","Data":"aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64"} Sep 30 20:58:01 crc kubenswrapper[4603]: I0930 20:58:01.394317 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerStarted","Data":"61f0802145e7faf7557174c1d534e3a6506917adc6d89112134314c1f8babbbb"} Sep 30 20:58:02 crc kubenswrapper[4603]: I0930 20:58:02.406723 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerStarted","Data":"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34"} Sep 30 20:58:03 crc kubenswrapper[4603]: I0930 20:58:03.416418 4603 generic.go:334] "Generic (PLEG): container finished" podID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerID="a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34" exitCode=0 Sep 30 20:58:03 crc kubenswrapper[4603]: I0930 20:58:03.416495 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerDied","Data":"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34"} Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.428054 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerStarted","Data":"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca"} Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.452332 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d2cq5" podStartSLOduration=1.919217104 podStartE2EDuration="4.452313672s" podCreationTimestamp="2025-09-30 20:58:00 +0000 UTC" firstStartedPulling="2025-09-30 20:58:01.395785198 +0000 UTC m=+4283.334244016" lastFinishedPulling="2025-09-30 20:58:03.928881766 +0000 UTC m=+4285.867340584" observedRunningTime="2025-09-30 20:58:04.445260168 +0000 UTC m=+4286.383718996" watchObservedRunningTime="2025-09-30 20:58:04.452313672 +0000 UTC m=+4286.390772500" Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.770087 4603 scope.go:117] "RemoveContainer" 
containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:58:04 crc kubenswrapper[4603]: E0930 20:58:04.770378 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.843887 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.845921 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.862572 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.962759 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.963125 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:04 crc kubenswrapper[4603]: I0930 20:58:04.963193 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7lz8\" (UniqueName: \"kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.064364 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.064818 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7lz8\" (UniqueName: \"kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.064928 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 
20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.065055 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.065478 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.083206 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7lz8\" (UniqueName: \"kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8\") pod \"certified-operators-9gslq\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.183341 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:05 crc kubenswrapper[4603]: I0930 20:58:05.746642 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:06 crc kubenswrapper[4603]: I0930 20:58:06.459438 4603 generic.go:334] "Generic (PLEG): container finished" podID="343d7323-3116-4aea-87fa-37a71b471260" containerID="efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0" exitCode=0 Sep 30 20:58:06 crc kubenswrapper[4603]: I0930 20:58:06.459477 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerDied","Data":"efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0"} Sep 30 20:58:06 crc kubenswrapper[4603]: I0930 20:58:06.459699 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerStarted","Data":"c5a5d22700f92391f4dc691486093d51edce872dac6af8aa6f07ae36db8c6ca4"} Sep 30 20:58:08 crc kubenswrapper[4603]: I0930 20:58:08.478578 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerStarted","Data":"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65"} Sep 30 20:58:09 crc kubenswrapper[4603]: I0930 20:58:09.488904 4603 generic.go:334] "Generic (PLEG): container finished" podID="343d7323-3116-4aea-87fa-37a71b471260" containerID="0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65" exitCode=0 Sep 30 20:58:09 crc kubenswrapper[4603]: I0930 20:58:09.489192 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerDied","Data":"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65"} Sep 30 20:58:10 crc kubenswrapper[4603]: I0930 20:58:10.382563 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:10 crc kubenswrapper[4603]: 
I0930 20:58:10.382886 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:10 crc kubenswrapper[4603]: I0930 20:58:10.428941 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:10 crc kubenswrapper[4603]: I0930 20:58:10.499244 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerStarted","Data":"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475"} Sep 30 20:58:10 crc kubenswrapper[4603]: I0930 20:58:10.517870 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9gslq" podStartSLOduration=2.93076442 podStartE2EDuration="6.517852848s" podCreationTimestamp="2025-09-30 20:58:04 +0000 UTC" firstStartedPulling="2025-09-30 20:58:06.460935396 +0000 UTC m=+4288.399394214" lastFinishedPulling="2025-09-30 20:58:10.048023784 +0000 UTC m=+4291.986482642" observedRunningTime="2025-09-30 20:58:10.513825017 +0000 UTC m=+4292.452283835" watchObservedRunningTime="2025-09-30 20:58:10.517852848 +0000 UTC m=+4292.456311666" Sep 30 20:58:10 crc kubenswrapper[4603]: I0930 20:58:10.546778 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:12 crc kubenswrapper[4603]: I0930 20:58:12.839000 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:12 crc kubenswrapper[4603]: I0930 20:58:12.839682 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d2cq5" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="registry-server" containerID="cri-o://0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca" gracePeriod=2 Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.377299 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.417249 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities\") pod \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.417313 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content\") pod \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.417510 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwmwt\" (UniqueName: \"kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt\") pod \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\" (UID: \"56cfbf26-ed00-45da-b214-bcad23f1eb2c\") " Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.419104 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities" (OuterVolumeSpecName: "utilities") pod "56cfbf26-ed00-45da-b214-bcad23f1eb2c" (UID: "56cfbf26-ed00-45da-b214-bcad23f1eb2c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.430636 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56cfbf26-ed00-45da-b214-bcad23f1eb2c" (UID: "56cfbf26-ed00-45da-b214-bcad23f1eb2c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.450277 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt" (OuterVolumeSpecName: "kube-api-access-vwmwt") pod "56cfbf26-ed00-45da-b214-bcad23f1eb2c" (UID: "56cfbf26-ed00-45da-b214-bcad23f1eb2c"). InnerVolumeSpecName "kube-api-access-vwmwt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.519589 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.519626 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cfbf26-ed00-45da-b214-bcad23f1eb2c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.519639 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwmwt\" (UniqueName: \"kubernetes.io/projected/56cfbf26-ed00-45da-b214-bcad23f1eb2c-kube-api-access-vwmwt\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.527137 4603 generic.go:334] "Generic (PLEG): container finished" podID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerID="0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca" exitCode=0 Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.527205 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2cq5" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.527235 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerDied","Data":"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca"} Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.527305 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2cq5" event={"ID":"56cfbf26-ed00-45da-b214-bcad23f1eb2c","Type":"ContainerDied","Data":"61f0802145e7faf7557174c1d534e3a6506917adc6d89112134314c1f8babbbb"} Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.527331 4603 scope.go:117] "RemoveContainer" containerID="0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.561237 4603 scope.go:117] "RemoveContainer" containerID="a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.565784 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.589239 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2cq5"] Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.600318 4603 scope.go:117] "RemoveContainer" containerID="aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.631253 4603 scope.go:117] "RemoveContainer" containerID="0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca" Sep 30 20:58:13 crc kubenswrapper[4603]: E0930 20:58:13.631657 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca\": container with ID starting with 0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca not found: ID does not exist" containerID="0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.631686 4603 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca"} err="failed to get container status \"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca\": rpc error: code = NotFound desc = could not find container \"0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca\": container with ID starting with 0f23bbecf57b881d98b3c84cb24ce4223349790b6149e142894567df431c53ca not found: ID does not exist" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.631709 4603 scope.go:117] "RemoveContainer" containerID="a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34" Sep 30 20:58:13 crc kubenswrapper[4603]: E0930 20:58:13.631932 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34\": container with ID starting with a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34 not found: ID does not exist" containerID="a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.631952 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34"} err="failed to get container status \"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34\": rpc error: code = NotFound desc = could not find container \"a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34\": container with ID starting with a8e22162fceccfa84a021d2bef8195b226cde8e3397025a665c7e4a4b1f95b34 not found: ID does not exist" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.631963 4603 scope.go:117] "RemoveContainer" containerID="aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64" Sep 30 20:58:13 crc kubenswrapper[4603]: E0930 20:58:13.632155 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64\": container with ID starting with aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64 not found: ID does not exist" containerID="aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64" Sep 30 20:58:13 crc kubenswrapper[4603]: I0930 20:58:13.632225 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64"} err="failed to get container status \"aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64\": rpc error: code = NotFound desc = could not find container \"aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64\": container with ID starting with aec819b374c5f5eeec419f1d291c77bde286849bb69313d44d86186bb4ca9e64 not found: ID does not exist" Sep 30 20:58:14 crc kubenswrapper[4603]: I0930 20:58:14.782074 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" path="/var/lib/kubelet/pods/56cfbf26-ed00-45da-b214-bcad23f1eb2c/volumes" Sep 30 20:58:15 crc kubenswrapper[4603]: I0930 20:58:15.184340 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:15 crc kubenswrapper[4603]: I0930 20:58:15.184390 4603 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:15 crc kubenswrapper[4603]: I0930 20:58:15.248954 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:15 crc kubenswrapper[4603]: I0930 20:58:15.607444 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:15 crc kubenswrapper[4603]: I0930 20:58:15.764381 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:58:15 crc kubenswrapper[4603]: E0930 20:58:15.764744 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:58:16 crc kubenswrapper[4603]: I0930 20:58:16.838023 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:17 crc kubenswrapper[4603]: I0930 20:58:17.564393 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9gslq" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="registry-server" containerID="cri-o://f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475" gracePeriod=2 Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.081035 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.102156 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities\") pod \"343d7323-3116-4aea-87fa-37a71b471260\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.102296 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content\") pod \"343d7323-3116-4aea-87fa-37a71b471260\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.102602 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7lz8\" (UniqueName: \"kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8\") pod \"343d7323-3116-4aea-87fa-37a71b471260\" (UID: \"343d7323-3116-4aea-87fa-37a71b471260\") " Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.103084 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities" (OuterVolumeSpecName: "utilities") pod "343d7323-3116-4aea-87fa-37a71b471260" (UID: "343d7323-3116-4aea-87fa-37a71b471260"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.104217 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.107906 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8" (OuterVolumeSpecName: "kube-api-access-g7lz8") pod "343d7323-3116-4aea-87fa-37a71b471260" (UID: "343d7323-3116-4aea-87fa-37a71b471260"). InnerVolumeSpecName "kube-api-access-g7lz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.156035 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "343d7323-3116-4aea-87fa-37a71b471260" (UID: "343d7323-3116-4aea-87fa-37a71b471260"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.205274 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7lz8\" (UniqueName: \"kubernetes.io/projected/343d7323-3116-4aea-87fa-37a71b471260-kube-api-access-g7lz8\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.205309 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/343d7323-3116-4aea-87fa-37a71b471260-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.589815 4603 generic.go:334] "Generic (PLEG): container finished" podID="343d7323-3116-4aea-87fa-37a71b471260" containerID="f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475" exitCode=0 Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.589866 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerDied","Data":"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475"} Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.589905 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9gslq" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.589934 4603 scope.go:117] "RemoveContainer" containerID="f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.589917 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9gslq" event={"ID":"343d7323-3116-4aea-87fa-37a71b471260","Type":"ContainerDied","Data":"c5a5d22700f92391f4dc691486093d51edce872dac6af8aa6f07ae36db8c6ca4"} Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.614427 4603 scope.go:117] "RemoveContainer" containerID="0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.656579 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.656721 4603 scope.go:117] "RemoveContainer" containerID="efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.703225 4603 scope.go:117] "RemoveContainer" containerID="f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475" Sep 30 20:58:18 crc kubenswrapper[4603]: E0930 20:58:18.704007 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475\": container with ID starting with f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475 not found: ID does not exist" containerID="f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.704087 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475"} err="failed to get container status \"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475\": rpc error: code = NotFound desc = could not find container \"f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475\": container with ID starting with f9f6c10429eebaa965b80aafa6e69c580001f938519f89fa9f0fd2aed324b475 not found: ID does not exist" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.704120 4603 scope.go:117] "RemoveContainer" containerID="0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65" Sep 30 20:58:18 crc kubenswrapper[4603]: E0930 20:58:18.704642 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65\": container with ID starting with 0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65 not found: ID does not exist" containerID="0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.704759 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65"} err="failed to get container status \"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65\": rpc error: code = NotFound desc = could not find container \"0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65\": container with ID starting with 0003a927c9b1961ee0ad090cf747b7733c2bc5e635a77d5a09ba3b9bf7467c65 
not found: ID does not exist" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.704829 4603 scope.go:117] "RemoveContainer" containerID="efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0" Sep 30 20:58:18 crc kubenswrapper[4603]: E0930 20:58:18.705284 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0\": container with ID starting with efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0 not found: ID does not exist" containerID="efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.705317 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0"} err="failed to get container status \"efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0\": rpc error: code = NotFound desc = could not find container \"efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0\": container with ID starting with efe8869d83f2af06a3a5b812c676a38a3b2c9bf35893ce007c2d518f0e921ca0 not found: ID does not exist" Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.712541 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9gslq"] Sep 30 20:58:18 crc kubenswrapper[4603]: I0930 20:58:18.776618 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="343d7323-3116-4aea-87fa-37a71b471260" path="/var/lib/kubelet/pods/343d7323-3116-4aea-87fa-37a71b471260/volumes" Sep 30 20:58:24 crc kubenswrapper[4603]: I0930 20:58:24.646447 4603 generic.go:334] "Generic (PLEG): container finished" podID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" containerID="080a0cc939faad9507b5ecab3c26b2b8ebffe49a77db8508211f83897604b4f5" exitCode=0 Sep 30 20:58:24 crc kubenswrapper[4603]: I0930 20:58:24.646564 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2c3f4ced-b58c-409a-a046-b0803bdd6d44","Type":"ContainerDied","Data":"080a0cc939faad9507b5ecab3c26b2b8ebffe49a77db8508211f83897604b4f5"} Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.014421 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087023 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087068 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4b5w\" (UniqueName: \"kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087091 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087116 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087149 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087180 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087217 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087237 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.087291 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data\") pod \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\" (UID: \"2c3f4ced-b58c-409a-a046-b0803bdd6d44\") " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.089319 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.090373 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data" (OuterVolumeSpecName: "config-data") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.093464 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w" (OuterVolumeSpecName: "kube-api-access-k4b5w") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "kube-api-access-k4b5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.094382 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.097187 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.128667 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.129117 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.131099 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.153589 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2c3f4ced-b58c-409a-a046-b0803bdd6d44" (UID: "2c3f4ced-b58c-409a-a046-b0803bdd6d44"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192285 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4b5w\" (UniqueName: \"kubernetes.io/projected/2c3f4ced-b58c-409a-a046-b0803bdd6d44-kube-api-access-k4b5w\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192325 4603 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192340 4603 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192380 4603 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192392 4603 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192407 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192419 4603 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2c3f4ced-b58c-409a-a046-b0803bdd6d44-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192429 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c3f4ced-b58c-409a-a046-b0803bdd6d44-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.192439 4603 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2c3f4ced-b58c-409a-a046-b0803bdd6d44-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.216867 4603 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.293507 4603 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.671064 4603 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"2c3f4ced-b58c-409a-a046-b0803bdd6d44","Type":"ContainerDied","Data":"ea1cd62fb6df6f5a9508d065e68bbb4043595159553e885a9f902899dd03be66"} Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.671583 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea1cd62fb6df6f5a9508d065e68bbb4043595159553e885a9f902899dd03be66" Sep 30 20:58:26 crc kubenswrapper[4603]: I0930 20:58:26.671225 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 20:58:30 crc kubenswrapper[4603]: I0930 20:58:30.764867 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:58:30 crc kubenswrapper[4603]: E0930 20:58:30.765486 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.044493 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045579 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" containerName="tempest-tests-tempest-tests-runner" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045611 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" containerName="tempest-tests-tempest-tests-runner" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045639 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="extract-content" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045651 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="extract-content" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045692 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="extract-utilities" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045705 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="extract-utilities" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045747 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="extract-content" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045760 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="extract-content" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045784 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="extract-utilities" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045795 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="extract-utilities" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045817 4603 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045827 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: E0930 20:58:33.045847 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.045859 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.046156 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="343d7323-3116-4aea-87fa-37a71b471260" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.046198 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="56cfbf26-ed00-45da-b214-bcad23f1eb2c" containerName="registry-server" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.046242 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c3f4ced-b58c-409a-a046-b0803bdd6d44" containerName="tempest-tests-tempest-tests-runner" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.047385 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.049527 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nflcc" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.058871 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.238222 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6v5x\" (UniqueName: \"kubernetes.io/projected/f55ed2ad-4f61-4173-8ccb-9b67e72f494a-kube-api-access-j6v5x\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.238847 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.341227 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.341505 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6v5x\" (UniqueName: \"kubernetes.io/projected/f55ed2ad-4f61-4173-8ccb-9b67e72f494a-kube-api-access-j6v5x\") pod 
\"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.341954 4603 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.378826 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6v5x\" (UniqueName: \"kubernetes.io/projected/f55ed2ad-4f61-4173-8ccb-9b67e72f494a-kube-api-access-j6v5x\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.395784 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f55ed2ad-4f61-4173-8ccb-9b67e72f494a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:33 crc kubenswrapper[4603]: I0930 20:58:33.681030 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 20:58:34 crc kubenswrapper[4603]: I0930 20:58:34.149704 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 20:58:34 crc kubenswrapper[4603]: I0930 20:58:34.744777 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f55ed2ad-4f61-4173-8ccb-9b67e72f494a","Type":"ContainerStarted","Data":"d588b67170fdfb4449f30fde97545f5e18ff30420dd8e4117aa145ff9d6e835a"} Sep 30 20:58:36 crc kubenswrapper[4603]: I0930 20:58:36.806575 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f55ed2ad-4f61-4173-8ccb-9b67e72f494a","Type":"ContainerStarted","Data":"7b391286870c7b1e8817a8e73c11da6a3a4d94c0dbdd7818109cfd30c2b68f66"} Sep 30 20:58:36 crc kubenswrapper[4603]: I0930 20:58:36.819957 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.714506958 podStartE2EDuration="3.819930549s" podCreationTimestamp="2025-09-30 20:58:33 +0000 UTC" firstStartedPulling="2025-09-30 20:58:34.54876804 +0000 UTC m=+4316.487226868" lastFinishedPulling="2025-09-30 20:58:35.654191631 +0000 UTC m=+4317.592650459" observedRunningTime="2025-09-30 20:58:36.808902224 +0000 UTC m=+4318.747361072" watchObservedRunningTime="2025-09-30 20:58:36.819930549 +0000 UTC m=+4318.758389387" Sep 30 20:58:41 crc kubenswrapper[4603]: I0930 20:58:41.764406 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:58:41 crc kubenswrapper[4603]: E0930 20:58:41.764892 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.124181 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zxd6r/must-gather-pnj2m"] Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.130040 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.135021 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zxd6r"/"kube-root-ca.crt" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.135562 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-zxd6r"/"default-dockercfg-xg5nn" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.135419 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zxd6r"/"openshift-service-ca.crt" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.140916 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zxd6r/must-gather-pnj2m"] Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.158052 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g277k\" (UniqueName: \"kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.158179 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.260277 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g277k\" (UniqueName: \"kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.260343 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.260913 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.276056 4603 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-g277k\" (UniqueName: \"kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k\") pod \"must-gather-pnj2m\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:54 crc kubenswrapper[4603]: I0930 20:58:54.458131 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 20:58:55 crc kubenswrapper[4603]: I0930 20:58:55.262013 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zxd6r/must-gather-pnj2m"] Sep 30 20:58:55 crc kubenswrapper[4603]: W0930 20:58:55.271956 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod836a5274_ebb6_408d_bbe8_7e304b3d0d56.slice/crio-5da7b16f97705d81ad13275fe3d0985c63f69f02490c4fd5299b58edc7fddbcd WatchSource:0}: Error finding container 5da7b16f97705d81ad13275fe3d0985c63f69f02490c4fd5299b58edc7fddbcd: Status 404 returned error can't find the container with id 5da7b16f97705d81ad13275fe3d0985c63f69f02490c4fd5299b58edc7fddbcd Sep 30 20:58:55 crc kubenswrapper[4603]: I0930 20:58:55.274504 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:58:55 crc kubenswrapper[4603]: I0930 20:58:55.949136 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" event={"ID":"836a5274-ebb6-408d-bbe8-7e304b3d0d56","Type":"ContainerStarted","Data":"5da7b16f97705d81ad13275fe3d0985c63f69f02490c4fd5299b58edc7fddbcd"} Sep 30 20:58:56 crc kubenswrapper[4603]: I0930 20:58:56.764906 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 20:58:56 crc kubenswrapper[4603]: E0930 20:58:56.766405 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 20:59:01 crc kubenswrapper[4603]: I0930 20:59:01.004409 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" event={"ID":"836a5274-ebb6-408d-bbe8-7e304b3d0d56","Type":"ContainerStarted","Data":"24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945"} Sep 30 20:59:01 crc kubenswrapper[4603]: I0930 20:59:01.005053 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" event={"ID":"836a5274-ebb6-408d-bbe8-7e304b3d0d56","Type":"ContainerStarted","Data":"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491"} Sep 30 20:59:01 crc kubenswrapper[4603]: I0930 20:59:01.037230 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" podStartSLOduration=2.5260044329999998 podStartE2EDuration="7.037214881s" podCreationTimestamp="2025-09-30 20:58:54 +0000 UTC" firstStartedPulling="2025-09-30 20:58:55.274238187 +0000 UTC m=+4337.212697015" lastFinishedPulling="2025-09-30 20:58:59.785448645 +0000 UTC m=+4341.723907463" observedRunningTime="2025-09-30 20:59:01.031670597 +0000 UTC m=+4342.970129405" 
watchObservedRunningTime="2025-09-30 20:59:01.037214881 +0000 UTC m=+4342.975673699" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.198239 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-klpq4"] Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.199982 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.295233 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btczf\" (UniqueName: \"kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.295518 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.397973 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btczf\" (UniqueName: \"kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.398019 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.398155 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.423635 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btczf\" (UniqueName: \"kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf\") pod \"crc-debug-klpq4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 20:59:06 crc kubenswrapper[4603]: I0930 20:59:06.521022 4603 util.go:30] "No sandbox for pod can be found. 
Sep 30 20:59:07 crc kubenswrapper[4603]: I0930 20:59:07.059933 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" event={"ID":"baeefb53-216b-4c44-99ca-03638e9c9ed4","Type":"ContainerStarted","Data":"d8394cdb84fa32bfd3076aa587a774ccf111917e99a38bb24e388f1311893396"}
Sep 30 20:59:10 crc kubenswrapper[4603]: I0930 20:59:10.764779 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 20:59:10 crc kubenswrapper[4603]: E0930 20:59:10.765607 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:59:21 crc kubenswrapper[4603]: I0930 20:59:21.191714 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" event={"ID":"baeefb53-216b-4c44-99ca-03638e9c9ed4","Type":"ContainerStarted","Data":"9d944602634d42b111deb04216cee22d222d7c230f9b4ac8d030f9058c6c3656"}
Sep 30 20:59:21 crc kubenswrapper[4603]: I0930 20:59:21.213083 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" podStartSLOduration=1.7519439669999999 podStartE2EDuration="15.213058308s" podCreationTimestamp="2025-09-30 20:59:06 +0000 UTC" firstStartedPulling="2025-09-30 20:59:06.556783777 +0000 UTC m=+4348.495242595" lastFinishedPulling="2025-09-30 20:59:20.017898108 +0000 UTC m=+4361.956356936" observedRunningTime="2025-09-30 20:59:21.205681805 +0000 UTC m=+4363.144140623" watchObservedRunningTime="2025-09-30 20:59:21.213058308 +0000 UTC m=+4363.151517126"
Sep 30 20:59:24 crc kubenswrapper[4603]: I0930 20:59:24.765112 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 20:59:24 crc kubenswrapper[4603]: E0930 20:59:24.765913 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:59:36 crc kubenswrapper[4603]: I0930 20:59:36.764632 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 20:59:36 crc kubenswrapper[4603]: E0930 20:59:36.765192 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 20:59:50 crc kubenswrapper[4603]: I0930 20:59:50.764787 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 20:59:50 crc kubenswrapper[4603]: E0930 20:59:50.765574 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.151460 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"]
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.154274 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.157505 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.159531 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.176233 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"]
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.322822 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.323183 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvzr8\" (UniqueName: \"kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.323342 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.424676 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.424761 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.424811 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvzr8\" (UniqueName: \"kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.426059 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.446328 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.447587 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvzr8\" (UniqueName: \"kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8\") pod \"collect-profiles-29321100-xztb6\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.482604 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:00 crc kubenswrapper[4603]: I0930 21:00:00.987792 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"]
Sep 30 21:00:01 crc kubenswrapper[4603]: W0930 21:00:01.013873 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62f31b2e_42bf_4b16_8dcc_871e7862d74f.slice/crio-228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84 WatchSource:0}: Error finding container 228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84: Status 404 returned error can't find the container with id 228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84
Sep 30 21:00:01 crc kubenswrapper[4603]: I0930 21:00:01.573846 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6" event={"ID":"62f31b2e-42bf-4b16-8dcc-871e7862d74f","Type":"ContainerStarted","Data":"82e5e39b6d7565403dfbe78aa1fc8a47542ee1246780c0253433f413d927fd0b"}
Sep 30 21:00:01 crc kubenswrapper[4603]: I0930 21:00:01.575483 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6" event={"ID":"62f31b2e-42bf-4b16-8dcc-871e7862d74f","Type":"ContainerStarted","Data":"228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84"}
Sep 30 21:00:01 crc kubenswrapper[4603]: I0930 21:00:01.597742 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6" podStartSLOduration=1.597719393 podStartE2EDuration="1.597719393s" podCreationTimestamp="2025-09-30 21:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:00:01.591672236 +0000 UTC m=+4403.530131054" watchObservedRunningTime="2025-09-30 21:00:01.597719393 +0000 UTC m=+4403.536178211"
Sep 30 21:00:02 crc kubenswrapper[4603]: I0930 21:00:02.586949 4603 generic.go:334] "Generic (PLEG): container finished" podID="62f31b2e-42bf-4b16-8dcc-871e7862d74f" containerID="82e5e39b6d7565403dfbe78aa1fc8a47542ee1246780c0253433f413d927fd0b" exitCode=0
Sep 30 21:00:02 crc kubenswrapper[4603]: I0930 21:00:02.587770 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6" event={"ID":"62f31b2e-42bf-4b16-8dcc-871e7862d74f","Type":"ContainerDied","Data":"82e5e39b6d7565403dfbe78aa1fc8a47542ee1246780c0253433f413d927fd0b"}
Sep 30 21:00:03 crc kubenswrapper[4603]: I0930 21:00:03.765106 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 21:00:03 crc kubenswrapper[4603]: E0930 21:00:03.765800 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.130602 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.220758 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvzr8\" (UniqueName: \"kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8\") pod \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") "
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.220900 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume\") pod \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") "
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.221015 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume\") pod \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\" (UID: \"62f31b2e-42bf-4b16-8dcc-871e7862d74f\") "
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.222367 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume" (OuterVolumeSpecName: "config-volume") pod "62f31b2e-42bf-4b16-8dcc-871e7862d74f" (UID: "62f31b2e-42bf-4b16-8dcc-871e7862d74f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.242299 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8" (OuterVolumeSpecName: "kube-api-access-zvzr8") pod "62f31b2e-42bf-4b16-8dcc-871e7862d74f" (UID: "62f31b2e-42bf-4b16-8dcc-871e7862d74f"). InnerVolumeSpecName "kube-api-access-zvzr8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.249473 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "62f31b2e-42bf-4b16-8dcc-871e7862d74f" (UID: "62f31b2e-42bf-4b16-8dcc-871e7862d74f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.325676 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvzr8\" (UniqueName: \"kubernetes.io/projected/62f31b2e-42bf-4b16-8dcc-871e7862d74f-kube-api-access-zvzr8\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.325705 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62f31b2e-42bf-4b16-8dcc-871e7862d74f-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.325714 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62f31b2e-42bf-4b16-8dcc-871e7862d74f-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.612930 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6" event={"ID":"62f31b2e-42bf-4b16-8dcc-871e7862d74f","Type":"ContainerDied","Data":"228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84"}
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.612994 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="228fcabcb506938dce0e66a115e2244717df248993b1fc05b50e99fc327d0d84"
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.613065 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-xztb6"
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.667504 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52"]
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.677521 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-5cz52"]
Sep 30 21:00:04 crc kubenswrapper[4603]: I0930 21:00:04.777847 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="401333a8-8cdd-499e-a656-823b0fdc0828" path="/var/lib/kubelet/pods/401333a8-8cdd-499e-a656-823b0fdc0828/volumes"
Sep 30 21:00:18 crc kubenswrapper[4603]: I0930 21:00:18.770408 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 21:00:18 crc kubenswrapper[4603]: E0930 21:00:18.771032 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:30 crc kubenswrapper[4603]: I0930 21:00:30.765303 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 21:00:30 crc kubenswrapper[4603]: E0930 21:00:30.766377 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:42 crc kubenswrapper[4603]: I0930 21:00:42.510624 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57b5d479d8-xlxfm_850ec93c-cfa1-4bb4-905b-1b8296985c50/barbican-api/0.log"
Sep 30 21:00:42 crc kubenswrapper[4603]: I0930 21:00:42.553076 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57b5d479d8-xlxfm_850ec93c-cfa1-4bb4-905b-1b8296985c50/barbican-api-log/0.log"
Sep 30 21:00:42 crc kubenswrapper[4603]: I0930 21:00:42.755982 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-794f5b84fd-6qbxk_47e3b799-3f78-46c1-916e-cca00da66c8c/barbican-keystone-listener/0.log"
Sep 30 21:00:42 crc kubenswrapper[4603]: I0930 21:00:42.843838 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-794f5b84fd-6qbxk_47e3b799-3f78-46c1-916e-cca00da66c8c/barbican-keystone-listener-log/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.036083 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-694445bff9-srxdg_da706fb6-9ab9-4c32-bd34-2b9afe444c20/barbican-worker/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.116563 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-694445bff9-srxdg_da706fb6-9ab9-4c32-bd34-2b9afe444c20/barbican-worker-log/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.382885 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd_d826ad98-bfbb-4355-b0a2-c7ea9715b990/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.532660 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/ceilometer-central-agent/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.584664 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/ceilometer-notification-agent/0.log"
Sep 30 21:00:43 crc kubenswrapper[4603]: I0930 21:00:43.764441 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 21:00:43 crc kubenswrapper[4603]: E0930 21:00:43.764794 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.169141 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/sg-core/0.log"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.299259 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/proxy-httpd/0.log"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.459362 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f8849bc2-be9b-4897-9501-36c14d4e51f2/cinder-api-log/0.log"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.542305 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f8849bc2-be9b-4897-9501-36c14d4e51f2/cinder-api/0.log"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.716843 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b40198c2-8d68-4aab-9744-67114df39cc8/cinder-scheduler/0.log"
Sep 30 21:00:44 crc kubenswrapper[4603]: I0930 21:00:44.769395 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b40198c2-8d68-4aab-9744-67114df39cc8/probe/0.log"
Sep 30 21:00:45 crc kubenswrapper[4603]: I0930 21:00:45.088937 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-47wnn_af8365b2-113b-4c7b-8781-17cecdd6d3dd/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:45 crc kubenswrapper[4603]: I0930 21:00:45.166096 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-97w2f_b85ce012-d065-4005-9bbd-7bebe194cb45/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:45 crc kubenswrapper[4603]: I0930 21:00:45.370662 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx_260c57d2-7dcf-404e-83c2-64a074939299/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.011672 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/init/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.267756 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/init/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.291814 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/dnsmasq-dns/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.303086 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-nrkss_71fad0dd-a3d8-42b4-ab00-d98aa7368c5f/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.523613 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c24dd570-1c48-407f-bb26-0d85ab367883/glance-httpd/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.527301 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c24dd570-1c48-407f-bb26-0d85ab367883/glance-log/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.729943 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7/glance-httpd/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.786953 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7/glance-log/0.log"
Sep 30 21:00:46 crc kubenswrapper[4603]: I0930 21:00:46.911176 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon/2.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.087926 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon/1.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.290735 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hndqq_f4f9e63e-bace-4185-a45a-cbc16d4be310/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.463586 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-zvdt8_fea678fb-af98-424b-9231-32d6991910a3/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.490309 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon-log/0.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.778882 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_96ef0530-4c62-4ebf-b58d-59284fcdcad0/kube-state-metrics/0.log"
Sep 30 21:00:47 crc kubenswrapper[4603]: I0930 21:00:47.832157 4603 scope.go:117] "RemoveContainer" containerID="76e2bf7601242fda162a65fd9b0412430835ddae5d79faad9b70af423d3546fa"
Sep 30 21:00:48 crc kubenswrapper[4603]: I0930 21:00:48.074545 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-8bd66565b-k2wg7_614e45f1-3173-4eb1-8b47-56760f3468f4/keystone-api/0.log"
Sep 30 21:00:48 crc kubenswrapper[4603]: I0930 21:00:48.328233 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h_fbf155fd-4bef-49a0-8bf0-eb16974f5e89/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:49 crc kubenswrapper[4603]: I0930 21:00:49.111286 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5_404b3593-d4d1-4440-a645-8669f3676f09/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:49 crc kubenswrapper[4603]: I0930 21:00:49.153028 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cfbc4c69-xfx2z_6d92de7a-d198-431c-a00c-bf93f63890c0/neutron-httpd/0.log"
Sep 30 21:00:49 crc kubenswrapper[4603]: I0930 21:00:49.324244 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cfbc4c69-xfx2z_6d92de7a-d198-431c-a00c-bf93f63890c0/neutron-api/0.log"
Sep 30 21:00:49 crc kubenswrapper[4603]: I0930 21:00:49.392912 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_521a0a10-00f0-4bf2-8d0e-36ed170f6949/memcached/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.079094 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_6f9d13ef-768e-47e0-aa2d-f21e801a8e3b/nova-cell0-conductor-conductor/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.524093 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_090bfcb2-0286-4e66-a22f-d79a55de8ff8/nova-cell1-conductor-conductor/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.610491 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bee4fa0d-d809-44ef-b123-a8ec31dda906/nova-api-log/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.655384 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bee4fa0d-d809-44ef-b123-a8ec31dda906/nova-api-api/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.697801 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1e631e44-8a31-40d5-8463-cc93716e2a6c/nova-cell1-novncproxy-novncproxy/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.871475 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-tkcxx_cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:50 crc kubenswrapper[4603]: I0930 21:00:50.984885 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc2cc2ea-71b3-4874-8bcc-8504cb63b192/nova-metadata-log/0.log"
Sep 30 21:00:51 crc kubenswrapper[4603]: I0930 21:00:51.500114 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/mysql-bootstrap/0.log"
Sep 30 21:00:51 crc kubenswrapper[4603]: I0930 21:00:51.628762 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_8e3c4b82-8309-4c22-af40-2d9c530b0ef7/nova-scheduler-scheduler/0.log"
Sep 30 21:00:51 crc kubenswrapper[4603]: I0930 21:00:51.729387 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/mysql-bootstrap/0.log"
Sep 30 21:00:51 crc kubenswrapper[4603]: I0930 21:00:51.813969 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/galera/0.log"
Sep 30 21:00:52 crc kubenswrapper[4603]: I0930 21:00:52.036411 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/mysql-bootstrap/0.log"
Sep 30 21:00:52 crc kubenswrapper[4603]: I0930 21:00:52.264825 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/mysql-bootstrap/0.log"
Sep 30 21:00:52 crc kubenswrapper[4603]: I0930 21:00:52.370705 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc2cc2ea-71b3-4874-8bcc-8504cb63b192/nova-metadata-metadata/0.log"
Sep 30 21:00:52 crc kubenswrapper[4603]: I0930 21:00:52.388433 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/galera/0.log"
Sep 30 21:00:52 crc kubenswrapper[4603]: I0930 21:00:52.484868 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0aaa6eda-a979-4944-b575-6b987d1e32f3/openstackclient/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.174395 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-9hpwx_2911bc12-77af-4d68-858f-28d3cc2e263e/ovn-controller/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.220598 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rjs74_d06f3a9f-3191-4f74-8ccd-e765ca5d6613/openstack-network-exporter/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.372602 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server-init/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.650095 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.674621 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server-init/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.678862 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovs-vswitchd/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.875613 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-gdhjt_09cd14dc-05cd-4a02-adde-bd6cc7b55643/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.910798 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f5abf540-1fba-4b2e-83c6-4be4e500f153/openstack-network-exporter/0.log"
Sep 30 21:00:53 crc kubenswrapper[4603]: I0930 21:00:53.913289 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f5abf540-1fba-4b2e-83c6-4be4e500f153/ovn-northd/0.log"
Sep 30 21:00:54 crc kubenswrapper[4603]: I0930 21:00:54.082125 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_69f6bf19-226d-409f-afba-67be196077f7/ovsdbserver-nb/0.log"
Sep 30 21:00:54 crc kubenswrapper[4603]: I0930 21:00:54.084065 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_69f6bf19-226d-409f-afba-67be196077f7/openstack-network-exporter/0.log"
Sep 30 21:00:54 crc kubenswrapper[4603]: I0930 21:00:54.650447 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9bb34996-e3b5-4c33-aff4-b85b34009e54/openstack-network-exporter/0.log"
Sep 30 21:00:54 crc kubenswrapper[4603]: I0930 21:00:54.692420 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9bb34996-e3b5-4c33-aff4-b85b34009e54/ovsdbserver-sb/0.log"
Sep 30 21:00:54 crc kubenswrapper[4603]: I0930 21:00:54.790023 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511"
Sep 30 21:00:54 crc kubenswrapper[4603]: E0930 21:00:54.792246 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521"
Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.174428 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c9d79bbb8-vkl5v_866dea6a-1003-486a-9893-5ede909f55dd/placement-api/0.log"
Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.215776 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c9d79bbb8-vkl5v_866dea6a-1003-486a-9893-5ede909f55dd/placement-log/0.log"
Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.225255 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/setup-container/0.log"
Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.353899 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/setup-container/0.log"
21:00:55.353899 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/setup-container/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.514423 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/setup-container/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.536504 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/rabbitmq/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.744058 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/setup-container/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.755380 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/rabbitmq/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.786608 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7_7b095899-7ded-4255-b88c-078c4e4f4d51/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:00:55 crc kubenswrapper[4603]: I0930 21:00:55.983532 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-2xbs9_0b2fc65f-3fea-42f7-903c-22d5ca817ad8/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.072509 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h_b7c30cf9-2ad9-4627-8364-293fec61fef1/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.236671 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-wmv5z_aff20c40-8319-4474-970b-9e7d3a672838/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.395915 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4tcb9_e8cf9e0f-89a8-4107-b6eb-3adc4978c983/ssh-known-hosts-edpm-deployment/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.636275 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5c4cbd6dd9-49g62_b5e87259-23eb-41cc-ba3a-ad1d47459e6a/proxy-httpd/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.693368 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5c4cbd6dd9-49g62_b5e87259-23eb-41cc-ba3a-ad1d47459e6a/proxy-server/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.799144 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-h66ml_bdec4dd6-c244-40d6-89c3-0644dd9421de/swift-ring-rebalance/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.913936 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-auditor/0.log" Sep 30 21:00:56 crc kubenswrapper[4603]: I0930 21:00:56.938291 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-reaper/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.090894 4603 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-server/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.099703 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-auditor/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.106747 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-replicator/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.158743 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-replicator/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.269385 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-server/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.318527 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-updater/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.330425 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-auditor/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.337837 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-expirer/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.496960 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-replicator/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.522786 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-server/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.528035 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-updater/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.570413 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/rsync/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.684221 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/swift-recon-cron/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.750519 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-ltndq_d4d2e2e5-2559-4ee2-801c-1c8d9917e367/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.954978 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2c3f4ced-b58c-409a-a046-b0803bdd6d44/tempest-tests-tempest-tests-runner/0.log" Sep 30 21:00:57 crc kubenswrapper[4603]: I0930 21:00:57.974801 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f55ed2ad-4f61-4173-8ccb-9b67e72f494a/test-operator-logs-container/0.log" Sep 30 21:00:58 crc kubenswrapper[4603]: I0930 21:00:58.132781 4603 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6kz72_2b982ca3-121d-442a-bd28-cf1623afe138/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.164827 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29321101-j7xbs"] Sep 30 21:01:00 crc kubenswrapper[4603]: E0930 21:01:00.165681 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f31b2e-42bf-4b16-8dcc-871e7862d74f" containerName="collect-profiles" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.165699 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f31b2e-42bf-4b16-8dcc-871e7862d74f" containerName="collect-profiles" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.165951 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f31b2e-42bf-4b16-8dcc-871e7862d74f" containerName="collect-profiles" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.166754 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.178268 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-j7xbs"] Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.245752 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.245819 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.245869 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.245887 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6k2s\" (UniqueName: \"kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.347510 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.347886 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.347949 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.347972 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6k2s\" (UniqueName: \"kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.365727 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.366803 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6k2s\" (UniqueName: \"kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.367039 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.367923 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data\") pod \"keystone-cron-29321101-j7xbs\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:00 crc kubenswrapper[4603]: I0930 21:01:00.487214 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:01 crc kubenswrapper[4603]: I0930 21:01:01.027367 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-j7xbs"] Sep 30 21:01:01 crc kubenswrapper[4603]: I0930 21:01:01.152746 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-j7xbs" event={"ID":"84a57b06-f74a-4bcd-b4ad-768af635a194","Type":"ContainerStarted","Data":"5055e3f15f5d4fef403f1d7ea19f5ebd8e66130712a7c688097d6a46421969ae"} Sep 30 21:01:02 crc kubenswrapper[4603]: I0930 21:01:02.162940 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-j7xbs" event={"ID":"84a57b06-f74a-4bcd-b4ad-768af635a194","Type":"ContainerStarted","Data":"b296c5efef05b10379b2fae29d8722bfdb3e5c02ef69c66559cc5e6512f54f9c"} Sep 30 21:01:06 crc kubenswrapper[4603]: I0930 21:01:06.195673 4603 generic.go:334] "Generic (PLEG): container finished" podID="84a57b06-f74a-4bcd-b4ad-768af635a194" containerID="b296c5efef05b10379b2fae29d8722bfdb3e5c02ef69c66559cc5e6512f54f9c" exitCode=0 Sep 30 21:01:06 crc kubenswrapper[4603]: I0930 21:01:06.195766 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-j7xbs" event={"ID":"84a57b06-f74a-4bcd-b4ad-768af635a194","Type":"ContainerDied","Data":"b296c5efef05b10379b2fae29d8722bfdb3e5c02ef69c66559cc5e6512f54f9c"} Sep 30 21:01:06 crc kubenswrapper[4603]: I0930 21:01:06.764835 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:01:06 crc kubenswrapper[4603]: E0930 21:01:06.765141 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.641667 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.787457 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data\") pod \"84a57b06-f74a-4bcd-b4ad-768af635a194\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.787767 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6k2s\" (UniqueName: \"kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s\") pod \"84a57b06-f74a-4bcd-b4ad-768af635a194\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.787866 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle\") pod \"84a57b06-f74a-4bcd-b4ad-768af635a194\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.788010 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys\") pod \"84a57b06-f74a-4bcd-b4ad-768af635a194\" (UID: \"84a57b06-f74a-4bcd-b4ad-768af635a194\") " Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.796629 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "84a57b06-f74a-4bcd-b4ad-768af635a194" (UID: "84a57b06-f74a-4bcd-b4ad-768af635a194"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.818436 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s" (OuterVolumeSpecName: "kube-api-access-q6k2s") pod "84a57b06-f74a-4bcd-b4ad-768af635a194" (UID: "84a57b06-f74a-4bcd-b4ad-768af635a194"). InnerVolumeSpecName "kube-api-access-q6k2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.881158 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84a57b06-f74a-4bcd-b4ad-768af635a194" (UID: "84a57b06-f74a-4bcd-b4ad-768af635a194"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.881752 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data" (OuterVolumeSpecName: "config-data") pod "84a57b06-f74a-4bcd-b4ad-768af635a194" (UID: "84a57b06-f74a-4bcd-b4ad-768af635a194"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.890364 4603 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.890402 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6k2s\" (UniqueName: \"kubernetes.io/projected/84a57b06-f74a-4bcd-b4ad-768af635a194-kube-api-access-q6k2s\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.890417 4603 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:07 crc kubenswrapper[4603]: I0930 21:01:07.890427 4603 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84a57b06-f74a-4bcd-b4ad-768af635a194-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:08 crc kubenswrapper[4603]: I0930 21:01:08.216553 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-j7xbs" event={"ID":"84a57b06-f74a-4bcd-b4ad-768af635a194","Type":"ContainerDied","Data":"5055e3f15f5d4fef403f1d7ea19f5ebd8e66130712a7c688097d6a46421969ae"} Sep 30 21:01:08 crc kubenswrapper[4603]: I0930 21:01:08.216597 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5055e3f15f5d4fef403f1d7ea19f5ebd8e66130712a7c688097d6a46421969ae" Sep 30 21:01:08 crc kubenswrapper[4603]: I0930 21:01:08.216662 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-j7xbs" Sep 30 21:01:18 crc kubenswrapper[4603]: I0930 21:01:18.769842 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:01:18 crc kubenswrapper[4603]: E0930 21:01:18.770619 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:01:31 crc kubenswrapper[4603]: I0930 21:01:31.764872 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:01:31 crc kubenswrapper[4603]: E0930 21:01:31.765516 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:01:38 crc kubenswrapper[4603]: I0930 21:01:38.514068 4603 generic.go:334] "Generic (PLEG): container finished" podID="baeefb53-216b-4c44-99ca-03638e9c9ed4" containerID="9d944602634d42b111deb04216cee22d222d7c230f9b4ac8d030f9058c6c3656" exitCode=0 Sep 30 21:01:38 crc kubenswrapper[4603]: I0930 21:01:38.514116 4603 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" event={"ID":"baeefb53-216b-4c44-99ca-03638e9c9ed4","Type":"ContainerDied","Data":"9d944602634d42b111deb04216cee22d222d7c230f9b4ac8d030f9058c6c3656"} Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.648822 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.720320 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-klpq4"] Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.729959 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-klpq4"] Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.767578 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btczf\" (UniqueName: \"kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf\") pod \"baeefb53-216b-4c44-99ca-03638e9c9ed4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.767658 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host\") pod \"baeefb53-216b-4c44-99ca-03638e9c9ed4\" (UID: \"baeefb53-216b-4c44-99ca-03638e9c9ed4\") " Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.768129 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host" (OuterVolumeSpecName: "host") pod "baeefb53-216b-4c44-99ca-03638e9c9ed4" (UID: "baeefb53-216b-4c44-99ca-03638e9c9ed4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.774093 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf" (OuterVolumeSpecName: "kube-api-access-btczf") pod "baeefb53-216b-4c44-99ca-03638e9c9ed4" (UID: "baeefb53-216b-4c44-99ca-03638e9c9ed4"). InnerVolumeSpecName "kube-api-access-btczf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.870208 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btczf\" (UniqueName: \"kubernetes.io/projected/baeefb53-216b-4c44-99ca-03638e9c9ed4-kube-api-access-btczf\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:39 crc kubenswrapper[4603]: I0930 21:01:39.870575 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/baeefb53-216b-4c44-99ca-03638e9c9ed4-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.536997 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8394cdb84fa32bfd3076aa587a774ccf111917e99a38bb24e388f1311893396" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.537094 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-klpq4" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.778850 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baeefb53-216b-4c44-99ca-03638e9c9ed4" path="/var/lib/kubelet/pods/baeefb53-216b-4c44-99ca-03638e9c9ed4/volumes" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.896756 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-p26lm"] Sep 30 21:01:40 crc kubenswrapper[4603]: E0930 21:01:40.897420 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baeefb53-216b-4c44-99ca-03638e9c9ed4" containerName="container-00" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.897507 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="baeefb53-216b-4c44-99ca-03638e9c9ed4" containerName="container-00" Sep 30 21:01:40 crc kubenswrapper[4603]: E0930 21:01:40.897579 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a57b06-f74a-4bcd-b4ad-768af635a194" containerName="keystone-cron" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.897637 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a57b06-f74a-4bcd-b4ad-768af635a194" containerName="keystone-cron" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.897895 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a57b06-f74a-4bcd-b4ad-768af635a194" containerName="keystone-cron" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.897978 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="baeefb53-216b-4c44-99ca-03638e9c9ed4" containerName="container-00" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.898664 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.993251 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:40 crc kubenswrapper[4603]: I0930 21:01:40.993374 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hllqd\" (UniqueName: \"kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.095463 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hllqd\" (UniqueName: \"kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.095966 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.096078 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.112362 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hllqd\" (UniqueName: \"kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd\") pod \"crc-debug-p26lm\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.216739 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.546609 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" event={"ID":"ed48da0d-b01e-43fb-b53d-12861899be84","Type":"ContainerStarted","Data":"f21d0e84eb69a21ec3a9def7c871f1515cd8647573ad72c6200418bd329f6cb0"} Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.546930 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" event={"ID":"ed48da0d-b01e-43fb-b53d-12861899be84","Type":"ContainerStarted","Data":"43975731d9aa0f021569c35ca55d1c9f9047ed31de6dd2ccb105e563251a81ac"} Sep 30 21:01:41 crc kubenswrapper[4603]: I0930 21:01:41.567464 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" podStartSLOduration=1.567379581 podStartE2EDuration="1.567379581s" podCreationTimestamp="2025-09-30 21:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:41.558808454 +0000 UTC m=+4503.497267282" watchObservedRunningTime="2025-09-30 21:01:41.567379581 +0000 UTC m=+4503.505838399" Sep 30 21:01:42 crc kubenswrapper[4603]: I0930 21:01:42.555285 4603 generic.go:334] "Generic (PLEG): container finished" podID="ed48da0d-b01e-43fb-b53d-12861899be84" containerID="f21d0e84eb69a21ec3a9def7c871f1515cd8647573ad72c6200418bd329f6cb0" exitCode=0 Sep 30 21:01:42 crc kubenswrapper[4603]: I0930 21:01:42.555331 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" event={"ID":"ed48da0d-b01e-43fb-b53d-12861899be84","Type":"ContainerDied","Data":"f21d0e84eb69a21ec3a9def7c871f1515cd8647573ad72c6200418bd329f6cb0"} Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.671757 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.738380 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host\") pod \"ed48da0d-b01e-43fb-b53d-12861899be84\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.738599 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hllqd\" (UniqueName: \"kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd\") pod \"ed48da0d-b01e-43fb-b53d-12861899be84\" (UID: \"ed48da0d-b01e-43fb-b53d-12861899be84\") " Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.739019 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host" (OuterVolumeSpecName: "host") pod "ed48da0d-b01e-43fb-b53d-12861899be84" (UID: "ed48da0d-b01e-43fb-b53d-12861899be84"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.739379 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed48da0d-b01e-43fb-b53d-12861899be84-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.744907 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd" (OuterVolumeSpecName: "kube-api-access-hllqd") pod "ed48da0d-b01e-43fb-b53d-12861899be84" (UID: "ed48da0d-b01e-43fb-b53d-12861899be84"). InnerVolumeSpecName "kube-api-access-hllqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:43 crc kubenswrapper[4603]: I0930 21:01:43.840520 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hllqd\" (UniqueName: \"kubernetes.io/projected/ed48da0d-b01e-43fb-b53d-12861899be84-kube-api-access-hllqd\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:44 crc kubenswrapper[4603]: I0930 21:01:44.573043 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" event={"ID":"ed48da0d-b01e-43fb-b53d-12861899be84","Type":"ContainerDied","Data":"43975731d9aa0f021569c35ca55d1c9f9047ed31de6dd2ccb105e563251a81ac"} Sep 30 21:01:44 crc kubenswrapper[4603]: I0930 21:01:44.573085 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43975731d9aa0f021569c35ca55d1c9f9047ed31de6dd2ccb105e563251a81ac" Sep 30 21:01:44 crc kubenswrapper[4603]: I0930 21:01:44.573135 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-p26lm" Sep 30 21:01:44 crc kubenswrapper[4603]: I0930 21:01:44.763806 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:01:44 crc kubenswrapper[4603]: E0930 21:01:44.764083 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:01:49 crc kubenswrapper[4603]: I0930 21:01:49.703387 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-p26lm"] Sep 30 21:01:49 crc kubenswrapper[4603]: I0930 21:01:49.715613 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-p26lm"] Sep 30 21:01:50 crc kubenswrapper[4603]: I0930 21:01:50.777044 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed48da0d-b01e-43fb-b53d-12861899be84" path="/var/lib/kubelet/pods/ed48da0d-b01e-43fb-b53d-12861899be84/volumes" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.111878 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-tvt7n"] Sep 30 21:01:51 crc kubenswrapper[4603]: E0930 21:01:51.112680 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed48da0d-b01e-43fb-b53d-12861899be84" containerName="container-00" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.112750 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ed48da0d-b01e-43fb-b53d-12861899be84" containerName="container-00" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.113005 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed48da0d-b01e-43fb-b53d-12861899be84" containerName="container-00" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.113634 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.164230 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9fsq\" (UniqueName: \"kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.164941 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.267479 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9fsq\" (UniqueName: \"kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.267641 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.267802 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.287803 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9fsq\" (UniqueName: \"kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq\") pod \"crc-debug-tvt7n\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.429288 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:51 crc kubenswrapper[4603]: I0930 21:01:51.639371 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" event={"ID":"ef102e5a-7c1f-4ef0-8567-09038627c6b7","Type":"ContainerStarted","Data":"58192de11b0895141d1d599c2ee333a6aac1558f8d0e7a73fe2262c976789561"} Sep 30 21:01:52 crc kubenswrapper[4603]: I0930 21:01:52.651019 4603 generic.go:334] "Generic (PLEG): container finished" podID="ef102e5a-7c1f-4ef0-8567-09038627c6b7" containerID="f97986d714b20bd2e3d98114dd93623f89596eed0146ddc410df194efbf18a70" exitCode=0 Sep 30 21:01:52 crc kubenswrapper[4603]: I0930 21:01:52.651101 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" event={"ID":"ef102e5a-7c1f-4ef0-8567-09038627c6b7","Type":"ContainerDied","Data":"f97986d714b20bd2e3d98114dd93623f89596eed0146ddc410df194efbf18a70"} Sep 30 21:01:52 crc kubenswrapper[4603]: I0930 21:01:52.694068 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-tvt7n"] Sep 30 21:01:52 crc kubenswrapper[4603]: I0930 21:01:52.743642 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zxd6r/crc-debug-tvt7n"] Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.757387 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.818253 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9fsq\" (UniqueName: \"kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq\") pod \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.818514 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host\") pod \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\" (UID: \"ef102e5a-7c1f-4ef0-8567-09038627c6b7\") " Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.818595 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host" (OuterVolumeSpecName: "host") pod "ef102e5a-7c1f-4ef0-8567-09038627c6b7" (UID: "ef102e5a-7c1f-4ef0-8567-09038627c6b7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.819035 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ef102e5a-7c1f-4ef0-8567-09038627c6b7-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.825149 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq" (OuterVolumeSpecName: "kube-api-access-r9fsq") pod "ef102e5a-7c1f-4ef0-8567-09038627c6b7" (UID: "ef102e5a-7c1f-4ef0-8567-09038627c6b7"). InnerVolumeSpecName "kube-api-access-r9fsq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:53 crc kubenswrapper[4603]: I0930 21:01:53.920159 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9fsq\" (UniqueName: \"kubernetes.io/projected/ef102e5a-7c1f-4ef0-8567-09038627c6b7-kube-api-access-r9fsq\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:54 crc kubenswrapper[4603]: I0930 21:01:54.668244 4603 scope.go:117] "RemoveContainer" containerID="f97986d714b20bd2e3d98114dd93623f89596eed0146ddc410df194efbf18a70" Sep 30 21:01:54 crc kubenswrapper[4603]: I0930 21:01:54.668606 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/crc-debug-tvt7n" Sep 30 21:01:54 crc kubenswrapper[4603]: I0930 21:01:54.776201 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef102e5a-7c1f-4ef0-8567-09038627c6b7" path="/var/lib/kubelet/pods/ef102e5a-7c1f-4ef0-8567-09038627c6b7/volumes" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.068589 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.478936 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.482029 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.504942 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.717352 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.770880 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/extract/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.795774 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:01:55 crc kubenswrapper[4603]: I0930 21:01:55.934051 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-qgzmq_e17e463e-0a04-457a-a014-480772f91871/kube-rbac-proxy/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.088667 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-62mwn_b9de699a-42fd-40f8-94e3-ccddd9f2e6c2/kube-rbac-proxy/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.129183 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-qgzmq_e17e463e-0a04-457a-a014-480772f91871/manager/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.225229 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-62mwn_b9de699a-42fd-40f8-94e3-ccddd9f2e6c2/manager/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.416000 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kn7c7_63389a19-bdd5-4862-a0b0-f93a5df19823/kube-rbac-proxy/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.427360 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kn7c7_63389a19-bdd5-4862-a0b0-f93a5df19823/manager/0.log" Sep 30 21:01:56 crc kubenswrapper[4603]: I0930 21:01:56.764293 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:01:56 crc kubenswrapper[4603]: E0930 21:01:56.764582 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.043578 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-p6fms_3e6da4be-f92f-48ee-85e4-f316da7f6e27/kube-rbac-proxy/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.079339 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-p6fms_3e6da4be-f92f-48ee-85e4-f316da7f6e27/manager/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.289851 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-2m98j_9caa0cb4-2c14-430d-ac4a-942c78ec844e/manager/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.318894 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-2m98j_9caa0cb4-2c14-430d-ac4a-942c78ec844e/kube-rbac-proxy/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.355952 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-s29xr_3fb36813-9cc2-4668-ad3a-da10b9594f8a/kube-rbac-proxy/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.509910 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-s29xr_3fb36813-9cc2-4668-ad3a-da10b9594f8a/manager/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.548858 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-pszrb_e0158f35-7f0f-4c77-b761-6b624fc675f0/kube-rbac-proxy/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.738558 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-pszrb_e0158f35-7f0f-4c77-b761-6b624fc675f0/manager/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.840686 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-n2qf4_5b507d22-1613-4e76-948f-e4d55f160473/kube-rbac-proxy/0.log" Sep 30 21:01:57 crc kubenswrapper[4603]: I0930 21:01:57.874670 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-n2qf4_5b507d22-1613-4e76-948f-e4d55f160473/manager/0.log" Sep 30 21:01:58 crc kubenswrapper[4603]: I0930 21:01:58.779027 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-bxvz5_ac8d681e-168c-401e-9529-54098a214435/kube-rbac-proxy/0.log" Sep 30 21:01:58 crc kubenswrapper[4603]: I0930 21:01:58.937070 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-bxvz5_ac8d681e-168c-401e-9529-54098a214435/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.007303 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-mg4sm_961055da-fa39-4301-b30d-f0a61d41371a/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.009564 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-mg4sm_961055da-fa39-4301-b30d-f0a61d41371a/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.162208 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-fd845_17b3f01d-c7ac-4b96-a90b-02c645fa27ed/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.238196 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6cvcn_45381319-4688-4802-a937-e804b3d0e6b1/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.269721 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-fd845_17b3f01d-c7ac-4b96-a90b-02c645fa27ed/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.355057 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6cvcn_45381319-4688-4802-a937-e804b3d0e6b1/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.473605 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-6l5w5_3a7017fa-c8d0-493d-a338-ec3d2626a289/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.544906 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-6l5w5_3a7017fa-c8d0-493d-a338-ec3d2626a289/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.602238 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fbmst_e262072a-1f18-48fa-a2af-73466cc9a40b/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.686533 4603 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fbmst_e262072a-1f18-48fa-a2af-73466cc9a40b/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.808001 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-gcb85_e6858f69-2a71-4459-89d4-59939c74b778/manager/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.850042 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-gcb85_e6858f69-2a71-4459-89d4-59939c74b778/kube-rbac-proxy/0.log" Sep 30 21:01:59 crc kubenswrapper[4603]: I0930 21:01:59.957050 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78964744f9-tbqf5_7c79bcd3-52f0-4d6b-8814-65ccfe3e9577/kube-rbac-proxy/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.097930 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dc7f48c86-wfrdl_e52d3ccd-fc80-4261-9043-2def9da416b6/kube-rbac-proxy/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.360623 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zgzz6_62cd7c09-702d-4432-a6ef-89900b8d4705/registry-server/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.493455 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dc7f48c86-wfrdl_e52d3ccd-fc80-4261-9043-2def9da416b6/operator/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.539482 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lcsw7_453ca8a5-9f93-4ad9-a0ef-14858d949b08/kube-rbac-proxy/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.691658 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lcsw7_453ca8a5-9f93-4ad9-a0ef-14858d949b08/manager/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.791193 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-dqmkb_e0835976-81c4-4f6f-aad4-0af0341168e2/kube-rbac-proxy/0.log" Sep 30 21:02:00 crc kubenswrapper[4603]: I0930 21:02:00.902322 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-dqmkb_e0835976-81c4-4f6f-aad4-0af0341168e2/manager/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.047695 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78964744f9-tbqf5_7c79bcd3-52f0-4d6b-8814-65ccfe3e9577/manager/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.175654 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x_0ef59238-520a-4221-8a49-40a4e1a1049d/operator/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.234874 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-z7l94_3a536984-9465-496f-9cfb-f48e32bd0c1b/kube-rbac-proxy/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.420469 4603 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-z7l94_3a536984-9465-496f-9cfb-f48e32bd0c1b/manager/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.520386 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-28kqj_ae8ee517-97d6-422e-a058-c229d111e654/kube-rbac-proxy/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.564712 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-28kqj_ae8ee517-97d6-422e-a058-c229d111e654/manager/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.709148 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-h8j45_c1669984-9655-488e-a243-0a48f9e381c1/kube-rbac-proxy/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.735848 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-h8j45_c1669984-9655-488e-a243-0a48f9e381c1/manager/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.779225 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gvcj_58ddfa7e-b740-4d7d-ba1e-22d3c81a5870/kube-rbac-proxy/0.log" Sep 30 21:02:01 crc kubenswrapper[4603]: I0930 21:02:01.881963 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gvcj_58ddfa7e-b740-4d7d-ba1e-22d3c81a5870/manager/0.log" Sep 30 21:02:11 crc kubenswrapper[4603]: I0930 21:02:11.765042 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:02:11 crc kubenswrapper[4603]: E0930 21:02:11.766025 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:02:19 crc kubenswrapper[4603]: I0930 21:02:19.799107 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nfzcr_0e2e94ed-63a4-4335-8edd-67b592965119/control-plane-machine-set-operator/0.log" Sep 30 21:02:19 crc kubenswrapper[4603]: I0930 21:02:19.936128 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fgjdh_f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4/kube-rbac-proxy/0.log" Sep 30 21:02:20 crc kubenswrapper[4603]: I0930 21:02:20.005211 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fgjdh_f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4/machine-api-operator/0.log" Sep 30 21:02:24 crc kubenswrapper[4603]: I0930 21:02:24.764308 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:02:24 crc kubenswrapper[4603]: E0930 21:02:24.765419 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:02:35 crc kubenswrapper[4603]: I0930 21:02:35.120785 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-x4ppt_9fd7e687-d7c9-4656-9665-491bbec118a0/cert-manager-controller/0.log" Sep 30 21:02:35 crc kubenswrapper[4603]: I0930 21:02:35.758535 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-phxbq_b091df7c-bb72-483d-a232-76684ca02eeb/cert-manager-cainjector/0.log" Sep 30 21:02:35 crc kubenswrapper[4603]: I0930 21:02:35.776437 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-q7ttm_02bbad49-bd1e-4b2a-bcaf-e87517081eab/cert-manager-webhook/0.log" Sep 30 21:02:37 crc kubenswrapper[4603]: I0930 21:02:37.764426 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:02:37 crc kubenswrapper[4603]: E0930 21:02:37.764951 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.104494 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-zrpwd_bffd6d89-d0ee-4fff-b026-afada4f9ef81/nmstate-console-plugin/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.409290 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-85hq5_78744fb1-2861-4f48-ac88-15cc146d4602/nmstate-handler/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.453406 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-cpplb_cc0f8a40-4ff0-47cd-be21-9b3659cad490/kube-rbac-proxy/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.494505 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-cpplb_cc0f8a40-4ff0-47cd-be21-9b3659cad490/nmstate-metrics/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.697645 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-zlch6_bed1084e-39ed-437d-83ba-ae195cd14423/nmstate-operator/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.702054 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-lcqr7_e4caa662-2ce2-4110-bdd5-989f27772b4c/nmstate-webhook/0.log" Sep 30 21:02:48 crc kubenswrapper[4603]: I0930 21:02:48.782802 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:02:49 crc kubenswrapper[4603]: I0930 21:02:49.176498 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" 
event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f"} Sep 30 21:03:03 crc kubenswrapper[4603]: I0930 21:03:03.428928 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-fg62p_4c195024-2cbe-4d5a-93f9-9cf1d5380440/kube-rbac-proxy/0.log" Sep 30 21:03:03 crc kubenswrapper[4603]: I0930 21:03:03.515238 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-fg62p_4c195024-2cbe-4d5a-93f9-9cf1d5380440/controller/0.log" Sep 30 21:03:03 crc kubenswrapper[4603]: I0930 21:03:03.647944 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.481999 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.586332 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.624739 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.642512 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.839873 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.851045 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.890275 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:03:04 crc kubenswrapper[4603]: I0930 21:03:04.937385 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.007635 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.038999 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.113231 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.143671 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/controller/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.320258 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/frr-metrics/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.350709 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/kube-rbac-proxy/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.413777 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/kube-rbac-proxy-frr/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.558253 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/reloader/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.682459 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-hv78l_a308f483-402e-4254-a1c8-440883cde4b9/frr-k8s-webhook-server/0.log" Sep 30 21:03:05 crc kubenswrapper[4603]: I0930 21:03:05.957025 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7b54d77bb4-zvnfm_eaf8636d-76cf-40c3-9e77-1b898b6e00be/manager/0.log" Sep 30 21:03:06 crc kubenswrapper[4603]: I0930 21:03:06.182185 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-696dffd44d-bfmjn_3aad58f2-49aa-472d-a347-92a699c7c78a/webhook-server/0.log" Sep 30 21:03:06 crc kubenswrapper[4603]: I0930 21:03:06.425705 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-87tdm_ead35fe0-3ed6-4cb2-943c-1f3609f978d3/kube-rbac-proxy/0.log" Sep 30 21:03:06 crc kubenswrapper[4603]: I0930 21:03:06.445209 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/frr/0.log" Sep 30 21:03:06 crc kubenswrapper[4603]: I0930 21:03:06.922375 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-87tdm_ead35fe0-3ed6-4cb2-943c-1f3609f978d3/speaker/0.log" Sep 30 21:03:20 crc kubenswrapper[4603]: I0930 21:03:20.494299 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:03:20 crc kubenswrapper[4603]: I0930 21:03:20.713775 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:03:20 crc kubenswrapper[4603]: I0930 21:03:20.738519 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:03:20 crc kubenswrapper[4603]: I0930 21:03:20.766968 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:03:20 crc kubenswrapper[4603]: I0930 21:03:20.974253 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.047444 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/extract/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.133727 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.217219 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.360019 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.423317 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:03:21 crc kubenswrapper[4603]: I0930 21:03:21.515248 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.059985 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.089504 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.625490 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/registry-server/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.631879 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.829578 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.864925 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:03:22 crc kubenswrapper[4603]: I0930 21:03:22.954962 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.042704 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.122647 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.375398 4603 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.708327 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.719933 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.732811 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/registry-server/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.755665 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:03:23 crc kubenswrapper[4603]: I0930 21:03:23.982181 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.014332 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/extract/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.030758 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.699607 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-dg8pl_cc7471b5-5468-4585-a14c-dec890fce87f/marketplace-operator/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.774499 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.920846 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.960806 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:03:24 crc kubenswrapper[4603]: I0930 21:03:24.985112 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.203396 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.341817 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/registry-server/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.349835 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.362152 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.569113 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.591348 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.600644 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.792382 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:03:25 crc kubenswrapper[4603]: I0930 21:03:25.834824 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:03:26 crc kubenswrapper[4603]: I0930 21:03:26.343992 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/registry-server/0.log" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.721200 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:21 crc kubenswrapper[4603]: E0930 21:04:21.722043 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef102e5a-7c1f-4ef0-8567-09038627c6b7" containerName="container-00" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.722055 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef102e5a-7c1f-4ef0-8567-09038627c6b7" containerName="container-00" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.722275 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef102e5a-7c1f-4ef0-8567-09038627c6b7" containerName="container-00" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.723772 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.732277 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.826300 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.826864 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.826949 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56c7l\" (UniqueName: \"kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.930635 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.931128 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.932068 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.932408 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.932102 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56c7l\" (UniqueName: \"kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:21 crc kubenswrapper[4603]: I0930 21:04:21.965250 4603 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-56c7l\" (UniqueName: \"kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l\") pod \"community-operators-2qtc7\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:22 crc kubenswrapper[4603]: I0930 21:04:22.066626 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:22 crc kubenswrapper[4603]: I0930 21:04:22.629144 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:22 crc kubenswrapper[4603]: W0930 21:04:22.640134 4603 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc36aec5c_5c51_4f74_aac6_8a0e8068b2cf.slice/crio-a9c5705c6684d3814f7fdc5385db3e0729104da19315b1c8561ea479485ecd9f WatchSource:0}: Error finding container a9c5705c6684d3814f7fdc5385db3e0729104da19315b1c8561ea479485ecd9f: Status 404 returned error can't find the container with id a9c5705c6684d3814f7fdc5385db3e0729104da19315b1c8561ea479485ecd9f Sep 30 21:04:23 crc kubenswrapper[4603]: I0930 21:04:23.106847 4603 generic.go:334] "Generic (PLEG): container finished" podID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerID="a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649" exitCode=0 Sep 30 21:04:23 crc kubenswrapper[4603]: I0930 21:04:23.106976 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerDied","Data":"a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649"} Sep 30 21:04:23 crc kubenswrapper[4603]: I0930 21:04:23.107129 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerStarted","Data":"a9c5705c6684d3814f7fdc5385db3e0729104da19315b1c8561ea479485ecd9f"} Sep 30 21:04:23 crc kubenswrapper[4603]: I0930 21:04:23.108762 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 21:04:24 crc kubenswrapper[4603]: I0930 21:04:24.130024 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerStarted","Data":"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478"} Sep 30 21:04:25 crc kubenswrapper[4603]: I0930 21:04:25.140761 4603 generic.go:334] "Generic (PLEG): container finished" podID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerID="42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478" exitCode=0 Sep 30 21:04:25 crc kubenswrapper[4603]: I0930 21:04:25.140812 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerDied","Data":"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478"} Sep 30 21:04:26 crc kubenswrapper[4603]: I0930 21:04:26.154548 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerStarted","Data":"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2"} Sep 30 21:04:26 crc kubenswrapper[4603]: I0930 
21:04:26.176513 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2qtc7" podStartSLOduration=2.722541698 podStartE2EDuration="5.176489515s" podCreationTimestamp="2025-09-30 21:04:21 +0000 UTC" firstStartedPulling="2025-09-30 21:04:23.108483176 +0000 UTC m=+4665.046941994" lastFinishedPulling="2025-09-30 21:04:25.562430953 +0000 UTC m=+4667.500889811" observedRunningTime="2025-09-30 21:04:26.170939241 +0000 UTC m=+4668.109398099" watchObservedRunningTime="2025-09-30 21:04:26.176489515 +0000 UTC m=+4668.114948333" Sep 30 21:04:32 crc kubenswrapper[4603]: I0930 21:04:32.067077 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:32 crc kubenswrapper[4603]: I0930 21:04:32.067807 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:32 crc kubenswrapper[4603]: I0930 21:04:32.504227 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:32 crc kubenswrapper[4603]: I0930 21:04:32.558542 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:32 crc kubenswrapper[4603]: I0930 21:04:32.748022 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.236980 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2qtc7" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="registry-server" containerID="cri-o://d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2" gracePeriod=2 Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.763057 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.850823 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content\") pod \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.850862 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities\") pod \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.850938 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56c7l\" (UniqueName: \"kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l\") pod \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\" (UID: \"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf\") " Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.855584 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities" (OuterVolumeSpecName: "utilities") pod "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" (UID: "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.873512 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l" (OuterVolumeSpecName: "kube-api-access-56c7l") pod "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" (UID: "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf"). InnerVolumeSpecName "kube-api-access-56c7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.901684 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" (UID: "c36aec5c-5c51-4f74-aac6-8a0e8068b2cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.953330 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.953361 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:34 crc kubenswrapper[4603]: I0930 21:04:34.953372 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56c7l\" (UniqueName: \"kubernetes.io/projected/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf-kube-api-access-56c7l\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.251624 4603 generic.go:334] "Generic (PLEG): container finished" podID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerID="d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2" exitCode=0 Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.251688 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerDied","Data":"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2"} Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.251726 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qtc7" event={"ID":"c36aec5c-5c51-4f74-aac6-8a0e8068b2cf","Type":"ContainerDied","Data":"a9c5705c6684d3814f7fdc5385db3e0729104da19315b1c8561ea479485ecd9f"} Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.251753 4603 scope.go:117] "RemoveContainer" containerID="d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.251919 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2qtc7" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.284383 4603 scope.go:117] "RemoveContainer" containerID="42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.305944 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.311146 4603 scope.go:117] "RemoveContainer" containerID="a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.316300 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2qtc7"] Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.365542 4603 scope.go:117] "RemoveContainer" containerID="d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2" Sep 30 21:04:35 crc kubenswrapper[4603]: E0930 21:04:35.365986 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2\": container with ID starting with d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2 not found: ID does not exist" containerID="d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.366024 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2"} err="failed to get container status \"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2\": rpc error: code = NotFound desc = could not find container \"d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2\": container with ID starting with d2fe045852fc35d0302880f83ccf67f7f6e667b1e8ed2cfde3f168d54d4e2ce2 not found: ID does not exist" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.366052 4603 scope.go:117] "RemoveContainer" containerID="42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478" Sep 30 21:04:35 crc kubenswrapper[4603]: E0930 21:04:35.366312 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478\": container with ID starting with 42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478 not found: ID does not exist" containerID="42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.366351 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478"} err="failed to get container status \"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478\": rpc error: code = NotFound desc = could not find container \"42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478\": container with ID starting with 42da66523fe405a6486dbad14e1329bc60b39a397d7418b1555768e589710478 not found: ID does not exist" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.366371 4603 scope.go:117] "RemoveContainer" containerID="a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649" Sep 30 21:04:35 crc kubenswrapper[4603]: E0930 21:04:35.366622 4603 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649\": container with ID starting with a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649 not found: ID does not exist" containerID="a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649" Sep 30 21:04:35 crc kubenswrapper[4603]: I0930 21:04:35.366662 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649"} err="failed to get container status \"a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649\": rpc error: code = NotFound desc = could not find container \"a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649\": container with ID starting with a96bba4652b4cf77b51b03e11345e8bf5c1aafff1193b2eca19e6e381b2b2649 not found: ID does not exist" Sep 30 21:04:36 crc kubenswrapper[4603]: I0930 21:04:36.777842 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" path="/var/lib/kubelet/pods/c36aec5c-5c51-4f74-aac6-8a0e8068b2cf/volumes" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.023542 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:07 crc kubenswrapper[4603]: E0930 21:05:07.027479 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="extract-utilities" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.027863 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="extract-utilities" Sep 30 21:05:07 crc kubenswrapper[4603]: E0930 21:05:07.028025 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="registry-server" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.028156 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="registry-server" Sep 30 21:05:07 crc kubenswrapper[4603]: E0930 21:05:07.028417 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="extract-content" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.028555 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="extract-content" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.029640 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36aec5c-5c51-4f74-aac6-8a0e8068b2cf" containerName="registry-server" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.031851 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.046495 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.149667 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jpcf\" (UniqueName: \"kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.150042 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.150091 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.251564 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.251622 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.251717 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jpcf\" (UniqueName: \"kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.252423 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.252635 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.273123 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4jpcf\" (UniqueName: \"kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf\") pod \"redhat-operators-769zh\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.365901 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:07 crc kubenswrapper[4603]: I0930 21:05:07.831620 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:08 crc kubenswrapper[4603]: I0930 21:05:08.442419 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:05:08 crc kubenswrapper[4603]: I0930 21:05:08.442809 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:05:08 crc kubenswrapper[4603]: I0930 21:05:08.691458 4603 generic.go:334] "Generic (PLEG): container finished" podID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerID="1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285" exitCode=0 Sep 30 21:05:08 crc kubenswrapper[4603]: I0930 21:05:08.691778 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerDied","Data":"1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285"} Sep 30 21:05:08 crc kubenswrapper[4603]: I0930 21:05:08.691807 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerStarted","Data":"cc2693cde09602f6b0971df162c8021d270221aff39bc86462b2b2dc658d9b16"} Sep 30 21:05:10 crc kubenswrapper[4603]: I0930 21:05:10.713282 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerStarted","Data":"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830"} Sep 30 21:05:13 crc kubenswrapper[4603]: I0930 21:05:13.759396 4603 generic.go:334] "Generic (PLEG): container finished" podID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerID="b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830" exitCode=0 Sep 30 21:05:13 crc kubenswrapper[4603]: I0930 21:05:13.759492 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerDied","Data":"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830"} Sep 30 21:05:14 crc kubenswrapper[4603]: I0930 21:05:14.782496 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerStarted","Data":"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89"} Sep 30 21:05:14 crc kubenswrapper[4603]: I0930 21:05:14.819061 4603 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-769zh" podStartSLOduration=3.3162476659999998 podStartE2EDuration="8.819044971s" podCreationTimestamp="2025-09-30 21:05:06 +0000 UTC" firstStartedPulling="2025-09-30 21:05:08.694780568 +0000 UTC m=+4710.633239416" lastFinishedPulling="2025-09-30 21:05:14.197577903 +0000 UTC m=+4716.136036721" observedRunningTime="2025-09-30 21:05:14.811643785 +0000 UTC m=+4716.750102613" watchObservedRunningTime="2025-09-30 21:05:14.819044971 +0000 UTC m=+4716.757503789" Sep 30 21:05:17 crc kubenswrapper[4603]: I0930 21:05:17.366936 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:17 crc kubenswrapper[4603]: I0930 21:05:17.367345 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:18 crc kubenswrapper[4603]: I0930 21:05:18.412476 4603 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-769zh" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="registry-server" probeResult="failure" output=< Sep 30 21:05:18 crc kubenswrapper[4603]: timeout: failed to connect service ":50051" within 1s Sep 30 21:05:18 crc kubenswrapper[4603]: > Sep 30 21:05:27 crc kubenswrapper[4603]: I0930 21:05:27.470968 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:27 crc kubenswrapper[4603]: I0930 21:05:27.561258 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:27 crc kubenswrapper[4603]: I0930 21:05:27.733487 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:28 crc kubenswrapper[4603]: I0930 21:05:28.917260 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-769zh" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="registry-server" containerID="cri-o://d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89" gracePeriod=2 Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.389173 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.560613 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jpcf\" (UniqueName: \"kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf\") pod \"0ed23042-17a0-4156-b8d5-a532e2bebeab\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.560679 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content\") pod \"0ed23042-17a0-4156-b8d5-a532e2bebeab\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.560835 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities\") pod \"0ed23042-17a0-4156-b8d5-a532e2bebeab\" (UID: \"0ed23042-17a0-4156-b8d5-a532e2bebeab\") " Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.561587 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities" (OuterVolumeSpecName: "utilities") pod "0ed23042-17a0-4156-b8d5-a532e2bebeab" (UID: "0ed23042-17a0-4156-b8d5-a532e2bebeab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.570485 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf" (OuterVolumeSpecName: "kube-api-access-4jpcf") pod "0ed23042-17a0-4156-b8d5-a532e2bebeab" (UID: "0ed23042-17a0-4156-b8d5-a532e2bebeab"). InnerVolumeSpecName "kube-api-access-4jpcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.656331 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ed23042-17a0-4156-b8d5-a532e2bebeab" (UID: "0ed23042-17a0-4156-b8d5-a532e2bebeab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.663443 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jpcf\" (UniqueName: \"kubernetes.io/projected/0ed23042-17a0-4156-b8d5-a532e2bebeab-kube-api-access-4jpcf\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.663476 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.663487 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ed23042-17a0-4156-b8d5-a532e2bebeab-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.935052 4603 generic.go:334] "Generic (PLEG): container finished" podID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerID="d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89" exitCode=0 Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.935096 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerDied","Data":"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89"} Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.935120 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-769zh" event={"ID":"0ed23042-17a0-4156-b8d5-a532e2bebeab","Type":"ContainerDied","Data":"cc2693cde09602f6b0971df162c8021d270221aff39bc86462b2b2dc658d9b16"} Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.935137 4603 scope.go:117] "RemoveContainer" containerID="d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.935280 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-769zh" Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.978378 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.987125 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-769zh"] Sep 30 21:05:29 crc kubenswrapper[4603]: I0930 21:05:29.993572 4603 scope.go:117] "RemoveContainer" containerID="b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.033632 4603 scope.go:117] "RemoveContainer" containerID="1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.076450 4603 scope.go:117] "RemoveContainer" containerID="d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89" Sep 30 21:05:30 crc kubenswrapper[4603]: E0930 21:05:30.078783 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89\": container with ID starting with d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89 not found: ID does not exist" containerID="d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.078829 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89"} err="failed to get container status \"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89\": rpc error: code = NotFound desc = could not find container \"d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89\": container with ID starting with d318212eee95c88e8ea7a36358e57033a233e917a60feca982789ac25599ba89 not found: ID does not exist" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.078854 4603 scope.go:117] "RemoveContainer" containerID="b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830" Sep 30 21:05:30 crc kubenswrapper[4603]: E0930 21:05:30.079206 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830\": container with ID starting with b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830 not found: ID does not exist" containerID="b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.079247 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830"} err="failed to get container status \"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830\": rpc error: code = NotFound desc = could not find container \"b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830\": container with ID starting with b8a8b5292e11281b7d8b9a979ccc56fb1785eb668730d823dce65c14ff653830 not found: ID does not exist" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.079279 4603 scope.go:117] "RemoveContainer" containerID="1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285" Sep 30 21:05:30 crc kubenswrapper[4603]: E0930 21:05:30.079549 4603 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285\": container with ID starting with 1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285 not found: ID does not exist" containerID="1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.079579 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285"} err="failed to get container status \"1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285\": rpc error: code = NotFound desc = could not find container \"1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285\": container with ID starting with 1396a4533c4a239285bc180a5a1ec5deae55ca6bdf6843c015bc186dad5f0285 not found: ID does not exist" Sep 30 21:05:30 crc kubenswrapper[4603]: I0930 21:05:30.786328 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" path="/var/lib/kubelet/pods/0ed23042-17a0-4156-b8d5-a532e2bebeab/volumes" Sep 30 21:05:38 crc kubenswrapper[4603]: I0930 21:05:38.442043 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:05:38 crc kubenswrapper[4603]: I0930 21:05:38.442397 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:05:48 crc kubenswrapper[4603]: I0930 21:05:48.120902 4603 scope.go:117] "RemoveContainer" containerID="9d944602634d42b111deb04216cee22d222d7c230f9b4ac8d030f9058c6c3656" Sep 30 21:06:00 crc kubenswrapper[4603]: I0930 21:06:00.268224 4603 generic.go:334] "Generic (PLEG): container finished" podID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerID="ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491" exitCode=0 Sep 30 21:06:00 crc kubenswrapper[4603]: I0930 21:06:00.268321 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" event={"ID":"836a5274-ebb6-408d-bbe8-7e304b3d0d56","Type":"ContainerDied","Data":"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491"} Sep 30 21:06:00 crc kubenswrapper[4603]: I0930 21:06:00.269432 4603 scope.go:117] "RemoveContainer" containerID="ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491" Sep 30 21:06:01 crc kubenswrapper[4603]: I0930 21:06:01.112551 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zxd6r_must-gather-pnj2m_836a5274-ebb6-408d-bbe8-7e304b3d0d56/gather/0.log" Sep 30 21:06:08 crc kubenswrapper[4603]: I0930 21:06:08.441574 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:06:08 crc kubenswrapper[4603]: I0930 21:06:08.442383 4603 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:06:08 crc kubenswrapper[4603]: I0930 21:06:08.442457 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 21:06:08 crc kubenswrapper[4603]: I0930 21:06:08.443658 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:06:08 crc kubenswrapper[4603]: I0930 21:06:08.443776 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f" gracePeriod=600 Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.361295 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f" exitCode=0 Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.361849 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f"} Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.361880 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b"} Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.361896 4603 scope.go:117] "RemoveContainer" containerID="a2cb62e22ec040233980ed3a7c3c1e033bfae25b4e7bede50b312b536cfc9511" Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.683109 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zxd6r/must-gather-pnj2m"] Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.683727 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="copy" containerID="cri-o://24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945" gracePeriod=2 Sep 30 21:06:09 crc kubenswrapper[4603]: I0930 21:06:09.695752 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zxd6r/must-gather-pnj2m"] Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.094776 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zxd6r_must-gather-pnj2m_836a5274-ebb6-408d-bbe8-7e304b3d0d56/copy/0.log" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.095472 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.240017 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output\") pod \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.240273 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g277k\" (UniqueName: \"kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k\") pod \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\" (UID: \"836a5274-ebb6-408d-bbe8-7e304b3d0d56\") " Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.246927 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k" (OuterVolumeSpecName: "kube-api-access-g277k") pod "836a5274-ebb6-408d-bbe8-7e304b3d0d56" (UID: "836a5274-ebb6-408d-bbe8-7e304b3d0d56"). InnerVolumeSpecName "kube-api-access-g277k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.345484 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g277k\" (UniqueName: \"kubernetes.io/projected/836a5274-ebb6-408d-bbe8-7e304b3d0d56-kube-api-access-g277k\") on node \"crc\" DevicePath \"\"" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.370723 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zxd6r_must-gather-pnj2m_836a5274-ebb6-408d-bbe8-7e304b3d0d56/copy/0.log" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.371157 4603 generic.go:334] "Generic (PLEG): container finished" podID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerID="24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945" exitCode=143 Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.371270 4603 scope.go:117] "RemoveContainer" containerID="24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.371227 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zxd6r/must-gather-pnj2m" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.405012 4603 scope.go:117] "RemoveContainer" containerID="ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.414998 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "836a5274-ebb6-408d-bbe8-7e304b3d0d56" (UID: "836a5274-ebb6-408d-bbe8-7e304b3d0d56"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.447509 4603 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/836a5274-ebb6-408d-bbe8-7e304b3d0d56-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.488436 4603 scope.go:117] "RemoveContainer" containerID="24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945" Sep 30 21:06:10 crc kubenswrapper[4603]: E0930 21:06:10.489007 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945\": container with ID starting with 24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945 not found: ID does not exist" containerID="24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.489066 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945"} err="failed to get container status \"24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945\": rpc error: code = NotFound desc = could not find container \"24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945\": container with ID starting with 24a2e2fd4da9ceff3796af4d09502ad2aa3509c03e69f6f6420edbea023b6945 not found: ID does not exist" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.489104 4603 scope.go:117] "RemoveContainer" containerID="ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491" Sep 30 21:06:10 crc kubenswrapper[4603]: E0930 21:06:10.489853 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491\": container with ID starting with ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491 not found: ID does not exist" containerID="ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.489894 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491"} err="failed to get container status \"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491\": rpc error: code = NotFound desc = could not find container \"ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491\": container with ID starting with ce061b653083a8968728ca01247f5375982d39387caca61c10fa05467f006491 not found: ID does not exist" Sep 30 21:06:10 crc kubenswrapper[4603]: I0930 21:06:10.776495 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" path="/var/lib/kubelet/pods/836a5274-ebb6-408d-bbe8-7e304b3d0d56/volumes" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.323276 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-svmwc/must-gather-b2ljj"] Sep 30 21:06:48 crc kubenswrapper[4603]: E0930 21:06:48.325058 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="copy" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325138 4603 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="copy" Sep 30 21:06:48 crc kubenswrapper[4603]: E0930 21:06:48.325312 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="gather" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325373 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="gather" Sep 30 21:06:48 crc kubenswrapper[4603]: E0930 21:06:48.325431 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="extract-utilities" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325483 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="extract-utilities" Sep 30 21:06:48 crc kubenswrapper[4603]: E0930 21:06:48.325550 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="registry-server" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325602 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="registry-server" Sep 30 21:06:48 crc kubenswrapper[4603]: E0930 21:06:48.325687 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="extract-content" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325741 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="extract-content" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.325984 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="gather" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.326067 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="836a5274-ebb6-408d-bbe8-7e304b3d0d56" containerName="copy" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.326131 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ed23042-17a0-4156-b8d5-a532e2bebeab" containerName="registry-server" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.327176 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.330491 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-svmwc"/"kube-root-ca.crt" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.331800 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-svmwc"/"default-dockercfg-x6mj7" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.332410 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-svmwc"/"openshift-service-ca.crt" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.333838 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-svmwc/must-gather-b2ljj"] Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.504719 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dnw7\" (UniqueName: \"kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.504933 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.606159 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.606328 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dnw7\" (UniqueName: \"kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.606813 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.625159 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dnw7\" (UniqueName: \"kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7\") pod \"must-gather-b2ljj\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:48 crc kubenswrapper[4603]: I0930 21:06:48.644915 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:06:49 crc kubenswrapper[4603]: I0930 21:06:49.159499 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-svmwc/must-gather-b2ljj"] Sep 30 21:06:49 crc kubenswrapper[4603]: I0930 21:06:49.865618 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/must-gather-b2ljj" event={"ID":"13faae60-7a72-43f4-83a9-5e67ea0c55e1","Type":"ContainerStarted","Data":"43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f"} Sep 30 21:06:49 crc kubenswrapper[4603]: I0930 21:06:49.865931 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/must-gather-b2ljj" event={"ID":"13faae60-7a72-43f4-83a9-5e67ea0c55e1","Type":"ContainerStarted","Data":"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4"} Sep 30 21:06:49 crc kubenswrapper[4603]: I0930 21:06:49.865944 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/must-gather-b2ljj" event={"ID":"13faae60-7a72-43f4-83a9-5e67ea0c55e1","Type":"ContainerStarted","Data":"4e6ca2d903680d4480ff1d16a3bf81e327dc38adfb4a3de032ad1209ce4eecd9"} Sep 30 21:06:49 crc kubenswrapper[4603]: I0930 21:06:49.894689 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-svmwc/must-gather-b2ljj" podStartSLOduration=1.894671856 podStartE2EDuration="1.894671856s" podCreationTimestamp="2025-09-30 21:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:06:49.889455331 +0000 UTC m=+4811.827914149" watchObservedRunningTime="2025-09-30 21:06:49.894671856 +0000 UTC m=+4811.833130664" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.304385 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-svmwc/crc-debug-cnghr"] Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.305863 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.419958 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.420283 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5kkg\" (UniqueName: \"kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.522495 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.522627 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5kkg\" (UniqueName: \"kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.522672 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.542908 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5kkg\" (UniqueName: \"kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg\") pod \"crc-debug-cnghr\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.627368 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:06:54 crc kubenswrapper[4603]: I0930 21:06:54.920065 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-cnghr" event={"ID":"24e71c3d-b2fa-446a-ac77-735d7aba95af","Type":"ContainerStarted","Data":"e28102c461f104ec43b46a6fbee15013a6159a3fe3d8a661c9a765940f007097"} Sep 30 21:06:55 crc kubenswrapper[4603]: I0930 21:06:55.930218 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-cnghr" event={"ID":"24e71c3d-b2fa-446a-ac77-735d7aba95af","Type":"ContainerStarted","Data":"5e019f1a7f7cb872dd433d9e15d7d7ab846d7267e83862cf769573d20eaae146"} Sep 30 21:06:55 crc kubenswrapper[4603]: I0930 21:06:55.949527 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-svmwc/crc-debug-cnghr" podStartSLOduration=1.949502706 podStartE2EDuration="1.949502706s" podCreationTimestamp="2025-09-30 21:06:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:06:55.948942351 +0000 UTC m=+4817.887401169" watchObservedRunningTime="2025-09-30 21:06:55.949502706 +0000 UTC m=+4817.887961524" Sep 30 21:07:48 crc kubenswrapper[4603]: I0930 21:07:48.266338 4603 scope.go:117] "RemoveContainer" containerID="f21d0e84eb69a21ec3a9def7c871f1515cd8647573ad72c6200418bd329f6cb0" Sep 30 21:08:08 crc kubenswrapper[4603]: I0930 21:08:08.441677 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:08:08 crc kubenswrapper[4603]: I0930 21:08:08.442350 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.238216 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57b5d479d8-xlxfm_850ec93c-cfa1-4bb4-905b-1b8296985c50/barbican-api/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.276891 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-57b5d479d8-xlxfm_850ec93c-cfa1-4bb4-905b-1b8296985c50/barbican-api-log/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.399315 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-794f5b84fd-6qbxk_47e3b799-3f78-46c1-916e-cca00da66c8c/barbican-keystone-listener/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.493308 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-794f5b84fd-6qbxk_47e3b799-3f78-46c1-916e-cca00da66c8c/barbican-keystone-listener-log/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.669604 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-694445bff9-srxdg_da706fb6-9ab9-4c32-bd34-2b9afe444c20/barbican-worker/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.748873 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-694445bff9-srxdg_da706fb6-9ab9-4c32-bd34-2b9afe444c20/barbican-worker-log/0.log" Sep 30 21:08:32 crc kubenswrapper[4603]: I0930 21:08:32.880390 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-4wrjd_d826ad98-bfbb-4355-b0a2-c7ea9715b990/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.187714 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/ceilometer-notification-agent/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.213841 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/ceilometer-central-agent/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.319895 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/proxy-httpd/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.624426 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f8849bc2-be9b-4897-9501-36c14d4e51f2/cinder-api/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.644069 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c7365d55-02e8-49ff-a924-590c17d22105/sg-core/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.835205 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f8849bc2-be9b-4897-9501-36c14d4e51f2/cinder-api-log/0.log" Sep 30 21:08:33 crc kubenswrapper[4603]: I0930 21:08:33.959188 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b40198c2-8d68-4aab-9744-67114df39cc8/cinder-scheduler/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.096059 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b40198c2-8d68-4aab-9744-67114df39cc8/probe/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.267894 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-47wnn_af8365b2-113b-4c7b-8781-17cecdd6d3dd/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.534831 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-97w2f_b85ce012-d065-4005-9bbd-7bebe194cb45/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.674500 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-9n4sx_260c57d2-7dcf-404e-83c2-64a074939299/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.724298 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/init/0.log" Sep 30 21:08:34 crc kubenswrapper[4603]: I0930 21:08:34.929454 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/init/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.093012 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-xkkjn_c8abae1f-7c59-4d4d-ad61-30628cb8871d/dnsmasq-dns/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.228751 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-nrkss_71fad0dd-a3d8-42b4-ab00-d98aa7368c5f/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.384244 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c24dd570-1c48-407f-bb26-0d85ab367883/glance-httpd/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.421635 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c24dd570-1c48-407f-bb26-0d85ab367883/glance-log/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.625952 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7/glance-httpd/0.log" Sep 30 21:08:35 crc kubenswrapper[4603]: I0930 21:08:35.693549 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_60324d02-1dfc-4bb9-b4c6-227bf3c3e3b7/glance-log/0.log" Sep 30 21:08:36 crc kubenswrapper[4603]: I0930 21:08:36.001752 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon/2.log" Sep 30 21:08:36 crc kubenswrapper[4603]: I0930 21:08:36.014913 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon/1.log" Sep 30 21:08:36 crc kubenswrapper[4603]: I0930 21:08:36.329360 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hndqq_f4f9e63e-bace-4185-a45a-cbc16d4be310/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:36 crc kubenswrapper[4603]: I0930 21:08:36.619507 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7cc565dc7d-zt9pz_53799743-167b-4a74-9cab-3e591a04391b/horizon-log/0.log" Sep 30 21:08:36 crc kubenswrapper[4603]: I0930 21:08:36.668238 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-zvdt8_fea678fb-af98-424b-9231-32d6991910a3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:37 crc kubenswrapper[4603]: I0930 21:08:37.265293 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-8bd66565b-k2wg7_614e45f1-3173-4eb1-8b47-56760f3468f4/keystone-api/0.log" Sep 30 21:08:37 crc kubenswrapper[4603]: I0930 21:08:37.508515 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_96ef0530-4c62-4ebf-b58d-59284fcdcad0/kube-state-metrics/0.log" Sep 30 21:08:37 crc kubenswrapper[4603]: I0930 21:08:37.521314 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321101-j7xbs_84a57b06-f74a-4bcd-b4ad-768af635a194/keystone-cron/0.log" Sep 30 21:08:37 crc kubenswrapper[4603]: I0930 21:08:37.725005 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-bxr6h_fbf155fd-4bef-49a0-8bf0-eb16974f5e89/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:38 crc kubenswrapper[4603]: I0930 21:08:38.420852 4603 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_neutron-9cfbc4c69-xfx2z_6d92de7a-d198-431c-a00c-bf93f63890c0/neutron-httpd/0.log" Sep 30 21:08:38 crc kubenswrapper[4603]: I0930 21:08:38.441399 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:08:38 crc kubenswrapper[4603]: I0930 21:08:38.441471 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:08:38 crc kubenswrapper[4603]: I0930 21:08:38.692611 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cfbc4c69-xfx2z_6d92de7a-d198-431c-a00c-bf93f63890c0/neutron-api/0.log" Sep 30 21:08:38 crc kubenswrapper[4603]: I0930 21:08:38.949367 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-85df5_404b3593-d4d1-4440-a645-8669f3676f09/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:40 crc kubenswrapper[4603]: I0930 21:08:40.160566 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_6f9d13ef-768e-47e0-aa2d-f21e801a8e3b/nova-cell0-conductor-conductor/0.log" Sep 30 21:08:40 crc kubenswrapper[4603]: I0930 21:08:40.568706 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bee4fa0d-d809-44ef-b123-a8ec31dda906/nova-api-log/0.log" Sep 30 21:08:40 crc kubenswrapper[4603]: I0930 21:08:40.935223 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_090bfcb2-0286-4e66-a22f-d79a55de8ff8/nova-cell1-conductor-conductor/0.log" Sep 30 21:08:40 crc kubenswrapper[4603]: I0930 21:08:40.947257 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_bee4fa0d-d809-44ef-b123-a8ec31dda906/nova-api-api/0.log" Sep 30 21:08:41 crc kubenswrapper[4603]: I0930 21:08:41.176957 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1e631e44-8a31-40d5-8463-cc93716e2a6c/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 21:08:41 crc kubenswrapper[4603]: I0930 21:08:41.401661 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_521a0a10-00f0-4bf2-8d0e-36ed170f6949/memcached/0.log" Sep 30 21:08:41 crc kubenswrapper[4603]: I0930 21:08:41.415979 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-tkcxx_cabbbd3f-3dec-4ae3-8ed3-5af87ae4bae9/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:41 crc kubenswrapper[4603]: I0930 21:08:41.497328 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc2cc2ea-71b3-4874-8bcc-8504cb63b192/nova-metadata-log/0.log" Sep 30 21:08:41 crc kubenswrapper[4603]: I0930 21:08:41.993117 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/mysql-bootstrap/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.207817 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_8e3c4b82-8309-4c22-af40-2d9c530b0ef7/nova-scheduler-scheduler/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.243121 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/galera/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.267506 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_75e5a99f-1349-4c73-bb51-2f101b8dc2ab/mysql-bootstrap/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.508405 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/mysql-bootstrap/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.888967 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/mysql-bootstrap/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.925359 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_95d8cde0-0585-4e08-a44f-34a9ba7034ee/galera/0.log" Sep 30 21:08:42 crc kubenswrapper[4603]: I0930 21:08:42.969333 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_cc2cc2ea-71b3-4874-8bcc-8504cb63b192/nova-metadata-metadata/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.116033 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0aaa6eda-a979-4944-b575-6b987d1e32f3/openstackclient/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.282858 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-9hpwx_2911bc12-77af-4d68-858f-28d3cc2e263e/ovn-controller/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.296831 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rjs74_d06f3a9f-3191-4f74-8ccd-e765ca5d6613/openstack-network-exporter/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.475518 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server-init/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.681947 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server-init/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.711093 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovs-vswitchd/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.777216 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bjlrh_099f885d-5177-4906-9641-0a42249a549a/ovsdb-server/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.931037 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-gdhjt_09cd14dc-05cd-4a02-adde-bd6cc7b55643/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:43 crc kubenswrapper[4603]: I0930 21:08:43.977420 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f5abf540-1fba-4b2e-83c6-4be4e500f153/openstack-network-exporter/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.023844 4603 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-northd-0_f5abf540-1fba-4b2e-83c6-4be4e500f153/ovn-northd/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.214521 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_69f6bf19-226d-409f-afba-67be196077f7/openstack-network-exporter/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.255734 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_69f6bf19-226d-409f-afba-67be196077f7/ovsdbserver-nb/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.392808 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9bb34996-e3b5-4c33-aff4-b85b34009e54/ovsdbserver-sb/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.460409 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_9bb34996-e3b5-4c33-aff4-b85b34009e54/openstack-network-exporter/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.685785 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c9d79bbb8-vkl5v_866dea6a-1003-486a-9893-5ede909f55dd/placement-api/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.771220 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/setup-container/0.log" Sep 30 21:08:44 crc kubenswrapper[4603]: I0930 21:08:44.905361 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-c9d79bbb8-vkl5v_866dea6a-1003-486a-9893-5ede909f55dd/placement-log/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.072002 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/rabbitmq/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.088291 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9af798ad-0a37-44c4-960f-d319d2c9f213/setup-container/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.125522 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/setup-container/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.305213 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/setup-container/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.351230 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-6lpt7_7b095899-7ded-4255-b88c-078c4e4f4d51/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.419254 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c52e492d-a859-4989-b9f6-91d03979296b/rabbitmq/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.579022 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-2xbs9_0b2fc65f-3fea-42f7-903c-22d5ca817ad8/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.725514 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gqw9h_b7c30cf9-2ad9-4627-8364-293fec61fef1/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 
30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.781750 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-wmv5z_aff20c40-8319-4474-970b-9e7d3a672838/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:45 crc kubenswrapper[4603]: I0930 21:08:45.968896 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-4tcb9_e8cf9e0f-89a8-4107-b6eb-3adc4978c983/ssh-known-hosts-edpm-deployment/0.log" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.109211 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5c4cbd6dd9-49g62_b5e87259-23eb-41cc-ba3a-ad1d47459e6a/proxy-httpd/0.log" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.474831 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.480096 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.495514 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.563114 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5c4cbd6dd9-49g62_b5e87259-23eb-41cc-ba3a-ad1d47459e6a/proxy-server/0.log" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.607845 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.608022 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.608204 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6rmw\" (UniqueName: \"kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.699203 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-h66ml_bdec4dd6-c244-40d6-89c3-0644dd9421de/swift-ring-rebalance/0.log" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.710390 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.710464 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.710516 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6rmw\" (UniqueName: \"kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.711237 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.711286 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.740006 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6rmw\" (UniqueName: \"kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw\") pod \"certified-operators-58js8\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:46 crc kubenswrapper[4603]: I0930 21:08:46.818602 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.283988 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-auditor/0.log" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.303090 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-reaper/0.log" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.391797 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-replicator/0.log" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.410539 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.490714 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/account-server/0.log" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.533740 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-auditor/0.log" Sep 30 21:08:47 crc kubenswrapper[4603]: I0930 21:08:47.603360 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-replicator/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.060738 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerStarted","Data":"c9d3e738d6b1077f121db91a3334b89c9fe1ba1c268cabe478d9535d83b82a4b"} Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.221999 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-server/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.331453 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/container-updater/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.374224 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-auditor/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.416222 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-expirer/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.478414 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-replicator/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.553530 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-server/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.578659 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/object-updater/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.606133 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/rsync/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.687293 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c71b4eca-ba52-40ee-88e6-f0b50794825d/swift-recon-cron/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.843845 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-ltndq_d4d2e2e5-2559-4ee2-801c-1c8d9917e367/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:48 crc kubenswrapper[4603]: I0930 21:08:48.967578 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2c3f4ced-b58c-409a-a046-b0803bdd6d44/tempest-tests-tempest-tests-runner/0.log" Sep 30 21:08:49 crc kubenswrapper[4603]: I0930 21:08:49.070240 4603 generic.go:334] "Generic (PLEG): container finished" podID="0641184a-d0ba-4f15-9976-f29c646a2013" containerID="56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a" exitCode=0 Sep 30 21:08:49 crc kubenswrapper[4603]: I0930 21:08:49.070294 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerDied","Data":"56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a"} Sep 30 21:08:49 crc kubenswrapper[4603]: I0930 21:08:49.092504 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f55ed2ad-4f61-4173-8ccb-9b67e72f494a/test-operator-logs-container/0.log" Sep 30 21:08:49 crc kubenswrapper[4603]: I0930 21:08:49.274048 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6kz72_2b982ca3-121d-442a-bd28-cf1623afe138/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:08:51 crc kubenswrapper[4603]: I0930 21:08:51.099559 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerStarted","Data":"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73"} Sep 30 21:08:52 crc kubenswrapper[4603]: I0930 21:08:52.108866 4603 generic.go:334] "Generic (PLEG): container finished" podID="0641184a-d0ba-4f15-9976-f29c646a2013" containerID="6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73" exitCode=0 Sep 30 21:08:52 crc kubenswrapper[4603]: I0930 21:08:52.108910 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerDied","Data":"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73"} Sep 30 21:08:53 crc kubenswrapper[4603]: I0930 21:08:53.120734 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerStarted","Data":"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb"} Sep 30 21:08:53 crc kubenswrapper[4603]: I0930 21:08:53.139224 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-58js8" podStartSLOduration=3.686848974 podStartE2EDuration="7.139209423s" podCreationTimestamp="2025-09-30 21:08:46 +0000 UTC" firstStartedPulling="2025-09-30 21:08:49.071854036 +0000 
UTC m=+4931.010312854" lastFinishedPulling="2025-09-30 21:08:52.524214485 +0000 UTC m=+4934.462673303" observedRunningTime="2025-09-30 21:08:53.134345669 +0000 UTC m=+4935.072804487" watchObservedRunningTime="2025-09-30 21:08:53.139209423 +0000 UTC m=+4935.077668241" Sep 30 21:08:56 crc kubenswrapper[4603]: I0930 21:08:56.818852 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:56 crc kubenswrapper[4603]: I0930 21:08:56.820409 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:56 crc kubenswrapper[4603]: I0930 21:08:56.869646 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:57 crc kubenswrapper[4603]: I0930 21:08:57.203697 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:57 crc kubenswrapper[4603]: I0930 21:08:57.250320 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.170914 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-58js8" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="registry-server" containerID="cri-o://ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb" gracePeriod=2 Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.660229 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.746709 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content\") pod \"0641184a-d0ba-4f15-9976-f29c646a2013\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.747117 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6rmw\" (UniqueName: \"kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw\") pod \"0641184a-d0ba-4f15-9976-f29c646a2013\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.747290 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities\") pod \"0641184a-d0ba-4f15-9976-f29c646a2013\" (UID: \"0641184a-d0ba-4f15-9976-f29c646a2013\") " Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.748027 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities" (OuterVolumeSpecName: "utilities") pod "0641184a-d0ba-4f15-9976-f29c646a2013" (UID: "0641184a-d0ba-4f15-9976-f29c646a2013"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.749302 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.754129 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw" (OuterVolumeSpecName: "kube-api-access-t6rmw") pod "0641184a-d0ba-4f15-9976-f29c646a2013" (UID: "0641184a-d0ba-4f15-9976-f29c646a2013"). InnerVolumeSpecName "kube-api-access-t6rmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.803453 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0641184a-d0ba-4f15-9976-f29c646a2013" (UID: "0641184a-d0ba-4f15-9976-f29c646a2013"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.851553 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0641184a-d0ba-4f15-9976-f29c646a2013-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:59 crc kubenswrapper[4603]: I0930 21:08:59.851597 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6rmw\" (UniqueName: \"kubernetes.io/projected/0641184a-d0ba-4f15-9976-f29c646a2013-kube-api-access-t6rmw\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.179829 4603 generic.go:334] "Generic (PLEG): container finished" podID="0641184a-d0ba-4f15-9976-f29c646a2013" containerID="ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb" exitCode=0 Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.179878 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerDied","Data":"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb"} Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.179905 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-58js8" event={"ID":"0641184a-d0ba-4f15-9976-f29c646a2013","Type":"ContainerDied","Data":"c9d3e738d6b1077f121db91a3334b89c9fe1ba1c268cabe478d9535d83b82a4b"} Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.179910 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-58js8" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.179921 4603 scope.go:117] "RemoveContainer" containerID="ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.220238 4603 scope.go:117] "RemoveContainer" containerID="6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.233069 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.249992 4603 scope.go:117] "RemoveContainer" containerID="56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.253677 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-58js8"] Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.290692 4603 scope.go:117] "RemoveContainer" containerID="ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb" Sep 30 21:09:00 crc kubenswrapper[4603]: E0930 21:09:00.291403 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb\": container with ID starting with ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb not found: ID does not exist" containerID="ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.291432 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb"} err="failed to get container status \"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb\": rpc error: code = NotFound desc = could not find container \"ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb\": container with ID starting with ab1fe400ac367d22fbc027014e769d6b7513437da2e93e83d7c6e5587c641fcb not found: ID does not exist" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.291452 4603 scope.go:117] "RemoveContainer" containerID="6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73" Sep 30 21:09:00 crc kubenswrapper[4603]: E0930 21:09:00.291676 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73\": container with ID starting with 6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73 not found: ID does not exist" containerID="6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.291712 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73"} err="failed to get container status \"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73\": rpc error: code = NotFound desc = could not find container \"6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73\": container with ID starting with 6e75bc73cb8982ade5e8d2f4885a689c0047cbd87f6a47ba53532bc0d7663a73 not found: ID does not exist" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.291725 4603 scope.go:117] "RemoveContainer" 
containerID="56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a" Sep 30 21:09:00 crc kubenswrapper[4603]: E0930 21:09:00.291930 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a\": container with ID starting with 56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a not found: ID does not exist" containerID="56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.291968 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a"} err="failed to get container status \"56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a\": rpc error: code = NotFound desc = could not find container \"56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a\": container with ID starting with 56e0556e3f085c0b88df49b5875f186a1fbf78b76dee8fddf942e48b97e38f3a not found: ID does not exist" Sep 30 21:09:00 crc kubenswrapper[4603]: I0930 21:09:00.776054 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" path="/var/lib/kubelet/pods/0641184a-d0ba-4f15-9976-f29c646a2013/volumes" Sep 30 21:09:05 crc kubenswrapper[4603]: I0930 21:09:05.218543 4603 generic.go:334] "Generic (PLEG): container finished" podID="24e71c3d-b2fa-446a-ac77-735d7aba95af" containerID="5e019f1a7f7cb872dd433d9e15d7d7ab846d7267e83862cf769573d20eaae146" exitCode=0 Sep 30 21:09:05 crc kubenswrapper[4603]: I0930 21:09:05.218634 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-cnghr" event={"ID":"24e71c3d-b2fa-446a-ac77-735d7aba95af","Type":"ContainerDied","Data":"5e019f1a7f7cb872dd433d9e15d7d7ab846d7267e83862cf769573d20eaae146"} Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.327534 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.361794 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-cnghr"] Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.370036 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-cnghr"] Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.403301 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host\") pod \"24e71c3d-b2fa-446a-ac77-735d7aba95af\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.403380 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5kkg\" (UniqueName: \"kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg\") pod \"24e71c3d-b2fa-446a-ac77-735d7aba95af\" (UID: \"24e71c3d-b2fa-446a-ac77-735d7aba95af\") " Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.403397 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host" (OuterVolumeSpecName: "host") pod "24e71c3d-b2fa-446a-ac77-735d7aba95af" (UID: "24e71c3d-b2fa-446a-ac77-735d7aba95af"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.403843 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24e71c3d-b2fa-446a-ac77-735d7aba95af-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.423351 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg" (OuterVolumeSpecName: "kube-api-access-n5kkg") pod "24e71c3d-b2fa-446a-ac77-735d7aba95af" (UID: "24e71c3d-b2fa-446a-ac77-735d7aba95af"). InnerVolumeSpecName "kube-api-access-n5kkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.505037 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5kkg\" (UniqueName: \"kubernetes.io/projected/24e71c3d-b2fa-446a-ac77-735d7aba95af-kube-api-access-n5kkg\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:06 crc kubenswrapper[4603]: I0930 21:09:06.773569 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24e71c3d-b2fa-446a-ac77-735d7aba95af" path="/var/lib/kubelet/pods/24e71c3d-b2fa-446a-ac77-735d7aba95af/volumes" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.235920 4603 scope.go:117] "RemoveContainer" containerID="5e019f1a7f7cb872dd433d9e15d7d7ab846d7267e83862cf769573d20eaae146" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.236039 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-cnghr" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.566077 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8pvgv"] Sep 30 21:09:07 crc kubenswrapper[4603]: E0930 21:09:07.567093 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="registry-server" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567124 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="registry-server" Sep 30 21:09:07 crc kubenswrapper[4603]: E0930 21:09:07.567158 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="extract-content" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567189 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="extract-content" Sep 30 21:09:07 crc kubenswrapper[4603]: E0930 21:09:07.567214 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e71c3d-b2fa-446a-ac77-735d7aba95af" containerName="container-00" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567223 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e71c3d-b2fa-446a-ac77-735d7aba95af" containerName="container-00" Sep 30 21:09:07 crc kubenswrapper[4603]: E0930 21:09:07.567237 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="extract-utilities" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567245 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="extract-utilities" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567495 4603 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="24e71c3d-b2fa-446a-ac77-735d7aba95af" containerName="container-00" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.567513 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0641184a-d0ba-4f15-9976-f29c646a2013" containerName="registry-server" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.568329 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.727384 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.727459 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llczx\" (UniqueName: \"kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.829318 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llczx\" (UniqueName: \"kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.829555 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.830024 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.850389 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llczx\" (UniqueName: \"kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx\") pod \"crc-debug-8pvgv\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:07 crc kubenswrapper[4603]: I0930 21:09:07.884308 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.246066 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" event={"ID":"0532d4e3-9ec8-4dd8-8eef-17cddc018416","Type":"ContainerStarted","Data":"66909eae22fe6a6e9a4403d2f34891dd335ef27218e26c942b3243f113908cc8"} Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.246420 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" event={"ID":"0532d4e3-9ec8-4dd8-8eef-17cddc018416","Type":"ContainerStarted","Data":"ee1e88dc781698c9aaa7e6e1cfcc9cae77c33cc7a6489967710f09a4282605e0"} Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.262976 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" podStartSLOduration=1.2629544959999999 podStartE2EDuration="1.262954496s" podCreationTimestamp="2025-09-30 21:09:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:09:08.259341086 +0000 UTC m=+4950.197799904" watchObservedRunningTime="2025-09-30 21:09:08.262954496 +0000 UTC m=+4950.201413314" Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.442260 4603 patch_prober.go:28] interesting pod/machine-config-daemon-g8q5x container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.442313 4603 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.442354 4603 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.443091 4603 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b"} pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:09:08 crc kubenswrapper[4603]: I0930 21:09:08.443142 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerName="machine-config-daemon" containerID="cri-o://910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" gracePeriod=600 Sep 30 21:09:08 crc kubenswrapper[4603]: E0930 21:09:08.574985 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" 
podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:09:09 crc kubenswrapper[4603]: I0930 21:09:09.262149 4603 generic.go:334] "Generic (PLEG): container finished" podID="3adf7280-9c4a-403e-8605-b5e5897f3521" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" exitCode=0 Sep 30 21:09:09 crc kubenswrapper[4603]: I0930 21:09:09.262231 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerDied","Data":"910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b"} Sep 30 21:09:09 crc kubenswrapper[4603]: I0930 21:09:09.262262 4603 scope.go:117] "RemoveContainer" containerID="d60e4a3288ec08fc20b6946b61d5e9e65660183c2ae1c86024dbf43ee2cc908f" Sep 30 21:09:09 crc kubenswrapper[4603]: I0930 21:09:09.262833 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:09:09 crc kubenswrapper[4603]: E0930 21:09:09.263117 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:09:10 crc kubenswrapper[4603]: I0930 21:09:10.271640 4603 generic.go:334] "Generic (PLEG): container finished" podID="0532d4e3-9ec8-4dd8-8eef-17cddc018416" containerID="66909eae22fe6a6e9a4403d2f34891dd335ef27218e26c942b3243f113908cc8" exitCode=0 Sep 30 21:09:10 crc kubenswrapper[4603]: I0930 21:09:10.271929 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" event={"ID":"0532d4e3-9ec8-4dd8-8eef-17cddc018416","Type":"ContainerDied","Data":"66909eae22fe6a6e9a4403d2f34891dd335ef27218e26c942b3243f113908cc8"} Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.422054 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.543906 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llczx\" (UniqueName: \"kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx\") pod \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.544379 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host\") pod \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\" (UID: \"0532d4e3-9ec8-4dd8-8eef-17cddc018416\") " Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.544956 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host" (OuterVolumeSpecName: "host") pod "0532d4e3-9ec8-4dd8-8eef-17cddc018416" (UID: "0532d4e3-9ec8-4dd8-8eef-17cddc018416"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.552730 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx" (OuterVolumeSpecName: "kube-api-access-llczx") pod "0532d4e3-9ec8-4dd8-8eef-17cddc018416" (UID: "0532d4e3-9ec8-4dd8-8eef-17cddc018416"). InnerVolumeSpecName "kube-api-access-llczx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.646047 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llczx\" (UniqueName: \"kubernetes.io/projected/0532d4e3-9ec8-4dd8-8eef-17cddc018416-kube-api-access-llczx\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:11 crc kubenswrapper[4603]: I0930 21:09:11.646079 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0532d4e3-9ec8-4dd8-8eef-17cddc018416-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:12 crc kubenswrapper[4603]: I0930 21:09:12.289426 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" event={"ID":"0532d4e3-9ec8-4dd8-8eef-17cddc018416","Type":"ContainerDied","Data":"ee1e88dc781698c9aaa7e6e1cfcc9cae77c33cc7a6489967710f09a4282605e0"} Sep 30 21:09:12 crc kubenswrapper[4603]: I0930 21:09:12.289481 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee1e88dc781698c9aaa7e6e1cfcc9cae77c33cc7a6489967710f09a4282605e0" Sep 30 21:09:12 crc kubenswrapper[4603]: I0930 21:09:12.289493 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8pvgv" Sep 30 21:09:15 crc kubenswrapper[4603]: I0930 21:09:15.991920 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8pvgv"] Sep 30 21:09:16 crc kubenswrapper[4603]: I0930 21:09:16.005942 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8pvgv"] Sep 30 21:09:16 crc kubenswrapper[4603]: I0930 21:09:16.782969 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0532d4e3-9ec8-4dd8-8eef-17cddc018416" path="/var/lib/kubelet/pods/0532d4e3-9ec8-4dd8-8eef-17cddc018416/volumes" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.198182 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8j45v"] Sep 30 21:09:17 crc kubenswrapper[4603]: E0930 21:09:17.198882 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0532d4e3-9ec8-4dd8-8eef-17cddc018416" containerName="container-00" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.198897 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="0532d4e3-9ec8-4dd8-8eef-17cddc018416" containerName="container-00" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.199069 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="0532d4e3-9ec8-4dd8-8eef-17cddc018416" containerName="container-00" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.200269 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.351824 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cg7f\" (UniqueName: \"kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.351969 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.454007 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cg7f\" (UniqueName: \"kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.454132 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.454208 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.477985 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cg7f\" (UniqueName: \"kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f\") pod \"crc-debug-8j45v\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:17 crc kubenswrapper[4603]: I0930 21:09:17.526915 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:18 crc kubenswrapper[4603]: I0930 21:09:18.361845 4603 generic.go:334] "Generic (PLEG): container finished" podID="471340b6-2235-47ec-9dc4-499c5ae0ee24" containerID="0f95de077203f3c2eb724a830c58a2a6f3d0e2acf5410ad06978b205d254e1ed" exitCode=0 Sep 30 21:09:18 crc kubenswrapper[4603]: I0930 21:09:18.361871 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8j45v" event={"ID":"471340b6-2235-47ec-9dc4-499c5ae0ee24","Type":"ContainerDied","Data":"0f95de077203f3c2eb724a830c58a2a6f3d0e2acf5410ad06978b205d254e1ed"} Sep 30 21:09:18 crc kubenswrapper[4603]: I0930 21:09:18.362416 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/crc-debug-8j45v" event={"ID":"471340b6-2235-47ec-9dc4-499c5ae0ee24","Type":"ContainerStarted","Data":"fe77cd9bedca006bc285e0bfd07213a6a1f03b507c7339f7ea78a0c4d039afcc"} Sep 30 21:09:18 crc kubenswrapper[4603]: I0930 21:09:18.410255 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8j45v"] Sep 30 21:09:18 crc kubenswrapper[4603]: I0930 21:09:18.420382 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-svmwc/crc-debug-8j45v"] Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.479856 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.594345 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host\") pod \"471340b6-2235-47ec-9dc4-499c5ae0ee24\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.594397 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cg7f\" (UniqueName: \"kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f\") pod \"471340b6-2235-47ec-9dc4-499c5ae0ee24\" (UID: \"471340b6-2235-47ec-9dc4-499c5ae0ee24\") " Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.594430 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host" (OuterVolumeSpecName: "host") pod "471340b6-2235-47ec-9dc4-499c5ae0ee24" (UID: "471340b6-2235-47ec-9dc4-499c5ae0ee24"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.594780 4603 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/471340b6-2235-47ec-9dc4-499c5ae0ee24-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.599657 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f" (OuterVolumeSpecName: "kube-api-access-4cg7f") pod "471340b6-2235-47ec-9dc4-499c5ae0ee24" (UID: "471340b6-2235-47ec-9dc4-499c5ae0ee24"). InnerVolumeSpecName "kube-api-access-4cg7f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:09:19 crc kubenswrapper[4603]: I0930 21:09:19.696993 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cg7f\" (UniqueName: \"kubernetes.io/projected/471340b6-2235-47ec-9dc4-499c5ae0ee24-kube-api-access-4cg7f\") on node \"crc\" DevicePath \"\"" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.233960 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.379931 4603 scope.go:117] "RemoveContainer" containerID="0f95de077203f3c2eb724a830c58a2a6f3d0e2acf5410ad06978b205d254e1ed" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.379982 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/crc-debug-8j45v" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.506386 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.507275 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.546319 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.711993 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/util/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.718743 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/extract/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.747694 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_050c3a3292b5248f443d5517b5afdc2fc794b6859cfcf8a83bd900d34fdh7p4_6941dd13-432d-4bb3-a789-54d719d95d42/pull/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.774520 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="471340b6-2235-47ec-9dc4-499c5ae0ee24" path="/var/lib/kubelet/pods/471340b6-2235-47ec-9dc4-499c5ae0ee24/volumes" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.901906 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-qgzmq_e17e463e-0a04-457a-a014-480772f91871/kube-rbac-proxy/0.log" Sep 30 21:09:20 crc kubenswrapper[4603]: I0930 21:09:20.976305 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-qgzmq_e17e463e-0a04-457a-a014-480772f91871/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.081673 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-62mwn_b9de699a-42fd-40f8-94e3-ccddd9f2e6c2/kube-rbac-proxy/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.191688 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-62mwn_b9de699a-42fd-40f8-94e3-ccddd9f2e6c2/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.296225 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kn7c7_63389a19-bdd5-4862-a0b0-f93a5df19823/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.309662 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-kn7c7_63389a19-bdd5-4862-a0b0-f93a5df19823/kube-rbac-proxy/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.455984 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-p6fms_3e6da4be-f92f-48ee-85e4-f316da7f6e27/kube-rbac-proxy/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.519877 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-p6fms_3e6da4be-f92f-48ee-85e4-f316da7f6e27/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.608963 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-2m98j_9caa0cb4-2c14-430d-ac4a-942c78ec844e/kube-rbac-proxy/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.659419 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-2m98j_9caa0cb4-2c14-430d-ac4a-942c78ec844e/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.764505 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:09:21 crc kubenswrapper[4603]: E0930 21:09:21.764741 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.787026 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-s29xr_3fb36813-9cc2-4668-ad3a-da10b9594f8a/kube-rbac-proxy/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.911240 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-s29xr_3fb36813-9cc2-4668-ad3a-da10b9594f8a/manager/0.log" Sep 30 21:09:21 crc kubenswrapper[4603]: I0930 21:09:21.949182 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-pszrb_e0158f35-7f0f-4c77-b761-6b624fc675f0/kube-rbac-proxy/0.log" Sep 30 21:09:22 crc kubenswrapper[4603]: I0930 21:09:22.202022 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-pszrb_e0158f35-7f0f-4c77-b761-6b624fc675f0/manager/0.log" Sep 30 21:09:22 crc kubenswrapper[4603]: I0930 21:09:22.672007 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-n2qf4_5b507d22-1613-4e76-948f-e4d55f160473/manager/0.log" Sep 30 21:09:22 crc kubenswrapper[4603]: I0930 21:09:22.743413 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-n2qf4_5b507d22-1613-4e76-948f-e4d55f160473/kube-rbac-proxy/0.log" Sep 30 21:09:22 crc kubenswrapper[4603]: I0930 21:09:22.859449 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-bxvz5_ac8d681e-168c-401e-9529-54098a214435/kube-rbac-proxy/0.log" Sep 30 21:09:22 crc kubenswrapper[4603]: I0930 21:09:22.959820 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-bxvz5_ac8d681e-168c-401e-9529-54098a214435/manager/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.014743 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-mg4sm_961055da-fa39-4301-b30d-f0a61d41371a/kube-rbac-proxy/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.087539 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-mg4sm_961055da-fa39-4301-b30d-f0a61d41371a/manager/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.180962 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-fd845_17b3f01d-c7ac-4b96-a90b-02c645fa27ed/kube-rbac-proxy/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.271304 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-fd845_17b3f01d-c7ac-4b96-a90b-02c645fa27ed/manager/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.337778 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6cvcn_45381319-4688-4802-a937-e804b3d0e6b1/kube-rbac-proxy/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.407904 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6cvcn_45381319-4688-4802-a937-e804b3d0e6b1/manager/0.log" Sep 30 21:09:23 crc kubenswrapper[4603]: I0930 21:09:23.470712 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-6l5w5_3a7017fa-c8d0-493d-a338-ec3d2626a289/kube-rbac-proxy/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.129480 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fbmst_e262072a-1f18-48fa-a2af-73466cc9a40b/kube-rbac-proxy/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.142801 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fbmst_e262072a-1f18-48fa-a2af-73466cc9a40b/manager/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.265258 4603 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-6l5w5_3a7017fa-c8d0-493d-a338-ec3d2626a289/manager/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.342024 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-gcb85_e6858f69-2a71-4459-89d4-59939c74b778/kube-rbac-proxy/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.352550 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-gcb85_e6858f69-2a71-4459-89d4-59939c74b778/manager/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.488751 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78964744f9-tbqf5_7c79bcd3-52f0-4d6b-8814-65ccfe3e9577/kube-rbac-proxy/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.631531 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dc7f48c86-wfrdl_e52d3ccd-fc80-4261-9043-2def9da416b6/kube-rbac-proxy/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.795098 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dc7f48c86-wfrdl_e52d3ccd-fc80-4261-9043-2def9da416b6/operator/0.log" Sep 30 21:09:24 crc kubenswrapper[4603]: I0930 21:09:24.911510 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-zgzz6_62cd7c09-702d-4432-a6ef-89900b8d4705/registry-server/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.065303 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lcsw7_453ca8a5-9f93-4ad9-a0ef-14858d949b08/kube-rbac-proxy/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.103726 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-lcsw7_453ca8a5-9f93-4ad9-a0ef-14858d949b08/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.191227 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-dqmkb_e0835976-81c4-4f6f-aad4-0af0341168e2/kube-rbac-proxy/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.348641 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-dqmkb_e0835976-81c4-4f6f-aad4-0af0341168e2/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.510312 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-bnn7x_0ef59238-520a-4221-8a49-40a4e1a1049d/operator/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.617825 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78964744f9-tbqf5_7c79bcd3-52f0-4d6b-8814-65ccfe3e9577/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.652201 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-z7l94_3a536984-9465-496f-9cfb-f48e32bd0c1b/kube-rbac-proxy/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.674338 4603 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-z7l94_3a536984-9465-496f-9cfb-f48e32bd0c1b/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.784553 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-28kqj_ae8ee517-97d6-422e-a058-c229d111e654/kube-rbac-proxy/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.907277 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-28kqj_ae8ee517-97d6-422e-a058-c229d111e654/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.916615 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-h8j45_c1669984-9655-488e-a243-0a48f9e381c1/manager/0.log" Sep 30 21:09:25 crc kubenswrapper[4603]: I0930 21:09:25.931626 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-h8j45_c1669984-9655-488e-a243-0a48f9e381c1/kube-rbac-proxy/0.log" Sep 30 21:09:26 crc kubenswrapper[4603]: I0930 21:09:26.077926 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gvcj_58ddfa7e-b740-4d7d-ba1e-22d3c81a5870/manager/0.log" Sep 30 21:09:26 crc kubenswrapper[4603]: I0930 21:09:26.081501 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-2gvcj_58ddfa7e-b740-4d7d-ba1e-22d3c81a5870/kube-rbac-proxy/0.log" Sep 30 21:09:34 crc kubenswrapper[4603]: I0930 21:09:34.764479 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:09:34 crc kubenswrapper[4603]: E0930 21:09:34.765272 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:09:44 crc kubenswrapper[4603]: I0930 21:09:44.083307 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nfzcr_0e2e94ed-63a4-4335-8edd-67b592965119/control-plane-machine-set-operator/0.log" Sep 30 21:09:44 crc kubenswrapper[4603]: I0930 21:09:44.154609 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fgjdh_f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4/kube-rbac-proxy/0.log" Sep 30 21:09:44 crc kubenswrapper[4603]: I0930 21:09:44.205043 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fgjdh_f7cf8e7d-d59f-42d1-92e4-1bc9f69740d4/machine-api-operator/0.log" Sep 30 21:09:47 crc kubenswrapper[4603]: I0930 21:09:47.764190 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:09:47 crc kubenswrapper[4603]: E0930 21:09:47.764834 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:09:57 crc kubenswrapper[4603]: I0930 21:09:57.023865 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-x4ppt_9fd7e687-d7c9-4656-9665-491bbec118a0/cert-manager-controller/0.log" Sep 30 21:09:57 crc kubenswrapper[4603]: I0930 21:09:57.239249 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-phxbq_b091df7c-bb72-483d-a232-76684ca02eeb/cert-manager-cainjector/0.log" Sep 30 21:09:57 crc kubenswrapper[4603]: I0930 21:09:57.325545 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-q7ttm_02bbad49-bd1e-4b2a-bcaf-e87517081eab/cert-manager-webhook/0.log" Sep 30 21:09:58 crc kubenswrapper[4603]: I0930 21:09:58.773363 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:09:58 crc kubenswrapper[4603]: E0930 21:09:58.773839 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:10:09 crc kubenswrapper[4603]: I0930 21:10:09.915222 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-zrpwd_bffd6d89-d0ee-4fff-b026-afada4f9ef81/nmstate-console-plugin/0.log" Sep 30 21:10:10 crc kubenswrapper[4603]: I0930 21:10:10.064504 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-85hq5_78744fb1-2861-4f48-ac88-15cc146d4602/nmstate-handler/0.log" Sep 30 21:10:10 crc kubenswrapper[4603]: I0930 21:10:10.771700 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-cpplb_cc0f8a40-4ff0-47cd-be21-9b3659cad490/nmstate-metrics/0.log" Sep 30 21:10:10 crc kubenswrapper[4603]: I0930 21:10:10.792890 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-cpplb_cc0f8a40-4ff0-47cd-be21-9b3659cad490/kube-rbac-proxy/0.log" Sep 30 21:10:10 crc kubenswrapper[4603]: I0930 21:10:10.962564 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-zlch6_bed1084e-39ed-437d-83ba-ae195cd14423/nmstate-operator/0.log" Sep 30 21:10:11 crc kubenswrapper[4603]: I0930 21:10:11.012816 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-lcqr7_e4caa662-2ce2-4110-bdd5-989f27772b4c/nmstate-webhook/0.log" Sep 30 21:10:11 crc kubenswrapper[4603]: I0930 21:10:11.763975 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:10:11 crc kubenswrapper[4603]: E0930 21:10:11.764579 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.433931 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-fg62p_4c195024-2cbe-4d5a-93f9-9cf1d5380440/controller/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.454491 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-fg62p_4c195024-2cbe-4d5a-93f9-9cf1d5380440/kube-rbac-proxy/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.639625 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.765038 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:10:25 crc kubenswrapper[4603]: E0930 21:10:25.765405 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.883572 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.911012 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.934240 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:10:25 crc kubenswrapper[4603]: I0930 21:10:25.976913 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.191754 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.260989 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.367382 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.375453 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.512924 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-frr-files/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 
21:10:26.547218 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-reloader/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.561853 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/cp-metrics/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.683570 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/controller/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.801072 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/frr-metrics/0.log" Sep 30 21:10:26 crc kubenswrapper[4603]: I0930 21:10:26.889718 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/kube-rbac-proxy/0.log" Sep 30 21:10:27 crc kubenswrapper[4603]: I0930 21:10:27.000958 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/kube-rbac-proxy-frr/0.log" Sep 30 21:10:27 crc kubenswrapper[4603]: I0930 21:10:27.127662 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/reloader/0.log" Sep 30 21:10:27 crc kubenswrapper[4603]: I0930 21:10:27.408574 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-hv78l_a308f483-402e-4254-a1c8-440883cde4b9/frr-k8s-webhook-server/0.log" Sep 30 21:10:27 crc kubenswrapper[4603]: I0930 21:10:27.525252 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7b54d77bb4-zvnfm_eaf8636d-76cf-40c3-9e77-1b898b6e00be/manager/0.log" Sep 30 21:10:27 crc kubenswrapper[4603]: I0930 21:10:27.680595 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-696dffd44d-bfmjn_3aad58f2-49aa-472d-a347-92a699c7c78a/webhook-server/0.log" Sep 30 21:10:28 crc kubenswrapper[4603]: I0930 21:10:28.019036 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s59z_6d18b2f5-97ae-442e-af33-cc7f501a33fe/frr/0.log" Sep 30 21:10:28 crc kubenswrapper[4603]: I0930 21:10:28.188759 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-87tdm_ead35fe0-3ed6-4cb2-943c-1f3609f978d3/kube-rbac-proxy/0.log" Sep 30 21:10:28 crc kubenswrapper[4603]: I0930 21:10:28.534943 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-87tdm_ead35fe0-3ed6-4cb2-943c-1f3609f978d3/speaker/0.log" Sep 30 21:10:40 crc kubenswrapper[4603]: I0930 21:10:40.764700 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:10:40 crc kubenswrapper[4603]: E0930 21:10:40.765354 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.220544 
4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.441656 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.444924 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.486597 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.664489 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/extract/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.684952 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/pull/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.741646 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcszp96_bd351257-d9aa-4b23-ac84-d67cb081eee7/util/0.log" Sep 30 21:10:42 crc kubenswrapper[4603]: I0930 21:10:42.851851 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.076667 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.093555 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.099532 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.206445 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-utilities/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.280086 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/extract-content/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.553945 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.811994 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.823945 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.864561 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:10:43 crc kubenswrapper[4603]: I0930 21:10:43.984152 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nmwg4_2f3491da-e6fc-4233-b117-34c80f1c2085/registry-server/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.077416 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-utilities/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.157732 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/extract-content/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.463923 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.721461 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.737419 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.797914 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.808183 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jxpv6_b08e6044-208e-45ae-a648-665c6c96c0aa/registry-server/0.log" Sep 30 21:10:44 crc kubenswrapper[4603]: I0930 21:10:44.939413 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/util/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.019256 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/extract/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.063151 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96gnjmh_28552f7e-e802-46cc-8250-2a91a3b81f4c/pull/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.156810 4603 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-dg8pl_cc7471b5-5468-4585-a14c-dec890fce87f/marketplace-operator/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.335000 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.523675 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.562621 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.568819 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.854475 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-utilities/0.log" Sep 30 21:10:45 crc kubenswrapper[4603]: I0930 21:10:45.945358 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/extract-content/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.025776 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.176146 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-9n76h_bec44cb0-0ba0-4168-9bc6-96216f3266b7/registry-server/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.255610 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.263048 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.312361 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.486908 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-content/0.log" Sep 30 21:10:46 crc kubenswrapper[4603]: I0930 21:10:46.592416 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/extract-utilities/0.log" Sep 30 21:10:47 crc kubenswrapper[4603]: I0930 21:10:47.006869 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9rn7_6e95bbaf-6456-4d51-ba65-63cb2948bf52/registry-server/0.log" Sep 30 21:10:55 crc kubenswrapper[4603]: I0930 21:10:55.764837 4603 scope.go:117] "RemoveContainer" 
containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:10:55 crc kubenswrapper[4603]: E0930 21:10:55.765760 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:11:08 crc kubenswrapper[4603]: I0930 21:11:08.770988 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:11:08 crc kubenswrapper[4603]: E0930 21:11:08.771823 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:11:18 crc kubenswrapper[4603]: E0930 21:11:18.982616 4603 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.129:51116->38.102.83.129:39427: read tcp 38.102.83.129:51116->38.102.83.129:39427: read: connection reset by peer Sep 30 21:11:19 crc kubenswrapper[4603]: I0930 21:11:19.764139 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:11:19 crc kubenswrapper[4603]: E0930 21:11:19.764609 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:11:33 crc kubenswrapper[4603]: I0930 21:11:33.764448 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:11:33 crc kubenswrapper[4603]: E0930 21:11:33.765229 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:11:48 crc kubenswrapper[4603]: I0930 21:11:48.776592 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:11:48 crc kubenswrapper[4603]: E0930 21:11:48.777407 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:12:02 crc 
kubenswrapper[4603]: I0930 21:12:02.763798 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:12:02 crc kubenswrapper[4603]: E0930 21:12:02.765254 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:12:14 crc kubenswrapper[4603]: I0930 21:12:14.764445 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:12:14 crc kubenswrapper[4603]: E0930 21:12:14.765480 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:12:27 crc kubenswrapper[4603]: I0930 21:12:27.768806 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:12:27 crc kubenswrapper[4603]: E0930 21:12:27.769802 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:12:38 crc kubenswrapper[4603]: I0930 21:12:38.765246 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:12:38 crc kubenswrapper[4603]: E0930 21:12:38.766571 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:12:52 crc kubenswrapper[4603]: I0930 21:12:52.764437 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:12:52 crc kubenswrapper[4603]: E0930 21:12:52.766617 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:13:07 crc kubenswrapper[4603]: I0930 21:13:07.764276 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:13:07 crc 
kubenswrapper[4603]: E0930 21:13:07.765289 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:13:15 crc kubenswrapper[4603]: I0930 21:13:15.720125 4603 generic.go:334] "Generic (PLEG): container finished" podID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerID="c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4" exitCode=0 Sep 30 21:13:15 crc kubenswrapper[4603]: I0930 21:13:15.720356 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-svmwc/must-gather-b2ljj" event={"ID":"13faae60-7a72-43f4-83a9-5e67ea0c55e1","Type":"ContainerDied","Data":"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4"} Sep 30 21:13:15 crc kubenswrapper[4603]: I0930 21:13:15.721703 4603 scope.go:117] "RemoveContainer" containerID="c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4" Sep 30 21:13:16 crc kubenswrapper[4603]: I0930 21:13:16.305663 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-svmwc_must-gather-b2ljj_13faae60-7a72-43f4-83a9-5e67ea0c55e1/gather/0.log" Sep 30 21:13:18 crc kubenswrapper[4603]: I0930 21:13:18.776088 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:13:18 crc kubenswrapper[4603]: E0930 21:13:18.776837 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.417038 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-svmwc/must-gather-b2ljj"] Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.418214 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-svmwc/must-gather-b2ljj" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="copy" containerID="cri-o://43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f" gracePeriod=2 Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.436269 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-svmwc/must-gather-b2ljj"] Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.770677 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:13:30 crc kubenswrapper[4603]: E0930 21:13:30.771535 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 
21:13:30.867465 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-svmwc_must-gather-b2ljj_13faae60-7a72-43f4-83a9-5e67ea0c55e1/copy/0.log" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.870130 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.878599 4603 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-svmwc_must-gather-b2ljj_13faae60-7a72-43f4-83a9-5e67ea0c55e1/copy/0.log" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.879037 4603 generic.go:334] "Generic (PLEG): container finished" podID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerID="43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f" exitCode=143 Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.879085 4603 scope.go:117] "RemoveContainer" containerID="43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.879351 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-svmwc/must-gather-b2ljj" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.899622 4603 scope.go:117] "RemoveContainer" containerID="c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.958377 4603 scope.go:117] "RemoveContainer" containerID="43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f" Sep 30 21:13:30 crc kubenswrapper[4603]: E0930 21:13:30.958801 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f\": container with ID starting with 43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f not found: ID does not exist" containerID="43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.958833 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f"} err="failed to get container status \"43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f\": rpc error: code = NotFound desc = could not find container \"43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f\": container with ID starting with 43e6a6f809bf22a047f7798fefe1d384c83c58f809ec24aaf637f914645c180f not found: ID does not exist" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.958853 4603 scope.go:117] "RemoveContainer" containerID="c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4" Sep 30 21:13:30 crc kubenswrapper[4603]: E0930 21:13:30.959238 4603 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4\": container with ID starting with c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4 not found: ID does not exist" containerID="c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4" Sep 30 21:13:30 crc kubenswrapper[4603]: I0930 21:13:30.959253 4603 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4"} err="failed to get container status 
\"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4\": rpc error: code = NotFound desc = could not find container \"c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4\": container with ID starting with c12ce7cd08d502daa4f3eb9567285a0c5e6ab5f831e384c16a406d8bc78bbef4 not found: ID does not exist" Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.066613 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output\") pod \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.066761 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dnw7\" (UniqueName: \"kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7\") pod \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\" (UID: \"13faae60-7a72-43f4-83a9-5e67ea0c55e1\") " Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.076462 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7" (OuterVolumeSpecName: "kube-api-access-8dnw7") pod "13faae60-7a72-43f4-83a9-5e67ea0c55e1" (UID: "13faae60-7a72-43f4-83a9-5e67ea0c55e1"). InnerVolumeSpecName "kube-api-access-8dnw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.169099 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dnw7\" (UniqueName: \"kubernetes.io/projected/13faae60-7a72-43f4-83a9-5e67ea0c55e1-kube-api-access-8dnw7\") on node \"crc\" DevicePath \"\"" Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.259497 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "13faae60-7a72-43f4-83a9-5e67ea0c55e1" (UID: "13faae60-7a72-43f4-83a9-5e67ea0c55e1"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:13:31 crc kubenswrapper[4603]: I0930 21:13:31.271436 4603 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/13faae60-7a72-43f4-83a9-5e67ea0c55e1-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 21:13:32 crc kubenswrapper[4603]: I0930 21:13:32.780800 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" path="/var/lib/kubelet/pods/13faae60-7a72-43f4-83a9-5e67ea0c55e1/volumes" Sep 30 21:13:45 crc kubenswrapper[4603]: I0930 21:13:45.765212 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:13:45 crc kubenswrapper[4603]: E0930 21:13:45.766214 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:13:58 crc kubenswrapper[4603]: I0930 21:13:58.779810 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:13:58 crc kubenswrapper[4603]: E0930 21:13:58.781910 4603 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8q5x_openshift-machine-config-operator(3adf7280-9c4a-403e-8605-b5e5897f3521)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" podUID="3adf7280-9c4a-403e-8605-b5e5897f3521" Sep 30 21:14:09 crc kubenswrapper[4603]: I0930 21:14:09.765356 4603 scope.go:117] "RemoveContainer" containerID="910f6165c59d7d8a21de4d973cce63ba8cd818a694cb3f71db78e91ba864734b" Sep 30 21:14:10 crc kubenswrapper[4603]: I0930 21:14:10.307830 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8q5x" event={"ID":"3adf7280-9c4a-403e-8605-b5e5897f3521","Type":"ContainerStarted","Data":"856d679d12d28610142f963d0da3625db6f3bceada7752c0def5aad35e6c58fd"} Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.948692 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:32 crc kubenswrapper[4603]: E0930 21:14:32.949642 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="copy" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949654 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="copy" Sep 30 21:14:32 crc kubenswrapper[4603]: E0930 21:14:32.949670 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="471340b6-2235-47ec-9dc4-499c5ae0ee24" containerName="container-00" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949680 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="471340b6-2235-47ec-9dc4-499c5ae0ee24" containerName="container-00" Sep 30 21:14:32 crc kubenswrapper[4603]: E0930 21:14:32.949699 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" 
containerName="gather" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949704 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="gather" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949897 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="471340b6-2235-47ec-9dc4-499c5ae0ee24" containerName="container-00" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949910 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="gather" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.949924 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="13faae60-7a72-43f4-83a9-5e67ea0c55e1" containerName="copy" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.951241 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:32 crc kubenswrapper[4603]: I0930 21:14:32.980222 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.086810 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.086884 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.086972 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tddj2\" (UniqueName: \"kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.188716 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tddj2\" (UniqueName: \"kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.189051 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.189096 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " 
pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.189521 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.189627 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.207890 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tddj2\" (UniqueName: \"kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2\") pod \"community-operators-kdfj8\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.290011 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:33 crc kubenswrapper[4603]: I0930 21:14:33.872966 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:34 crc kubenswrapper[4603]: I0930 21:14:34.557602 4603 generic.go:334] "Generic (PLEG): container finished" podID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerID="d7238c9f2f18f08f7fc0cac95ede75225aea3ede52fd30f1531ec1daad5c0c4a" exitCode=0 Sep 30 21:14:34 crc kubenswrapper[4603]: I0930 21:14:34.557669 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerDied","Data":"d7238c9f2f18f08f7fc0cac95ede75225aea3ede52fd30f1531ec1daad5c0c4a"} Sep 30 21:14:34 crc kubenswrapper[4603]: I0930 21:14:34.557718 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerStarted","Data":"e6cc25e13c09f992886167a60ed45a4a3609d5b4ac40942d1d8cd66f5b0e9016"} Sep 30 21:14:34 crc kubenswrapper[4603]: I0930 21:14:34.559860 4603 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 21:14:35 crc kubenswrapper[4603]: I0930 21:14:35.578154 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerStarted","Data":"bf5995e7f9e7497aed8f5e973dd265e4a7ed1e79a425f97492207d5be90c3714"} Sep 30 21:14:37 crc kubenswrapper[4603]: I0930 21:14:37.595992 4603 generic.go:334] "Generic (PLEG): container finished" podID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerID="bf5995e7f9e7497aed8f5e973dd265e4a7ed1e79a425f97492207d5be90c3714" exitCode=0 Sep 30 21:14:37 crc kubenswrapper[4603]: I0930 21:14:37.596178 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerDied","Data":"bf5995e7f9e7497aed8f5e973dd265e4a7ed1e79a425f97492207d5be90c3714"} 
Sep 30 21:14:38 crc kubenswrapper[4603]: I0930 21:14:38.608048 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerStarted","Data":"60d7ea4011489ca53dab64bf14b9a92a5723c976e54e1cf4f182bd991da897bb"} Sep 30 21:14:38 crc kubenswrapper[4603]: I0930 21:14:38.637639 4603 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kdfj8" podStartSLOduration=3.152536631 podStartE2EDuration="6.637621277s" podCreationTimestamp="2025-09-30 21:14:32 +0000 UTC" firstStartedPulling="2025-09-30 21:14:34.559664805 +0000 UTC m=+5276.498123623" lastFinishedPulling="2025-09-30 21:14:38.044749451 +0000 UTC m=+5279.983208269" observedRunningTime="2025-09-30 21:14:38.630378366 +0000 UTC m=+5280.568837194" watchObservedRunningTime="2025-09-30 21:14:38.637621277 +0000 UTC m=+5280.576080095" Sep 30 21:14:43 crc kubenswrapper[4603]: I0930 21:14:43.290335 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:43 crc kubenswrapper[4603]: I0930 21:14:43.290862 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:43 crc kubenswrapper[4603]: I0930 21:14:43.365659 4603 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:43 crc kubenswrapper[4603]: I0930 21:14:43.738936 4603 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:43 crc kubenswrapper[4603]: I0930 21:14:43.806653 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:45 crc kubenswrapper[4603]: I0930 21:14:45.692621 4603 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kdfj8" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="registry-server" containerID="cri-o://60d7ea4011489ca53dab64bf14b9a92a5723c976e54e1cf4f182bd991da897bb" gracePeriod=2 Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.712463 4603 generic.go:334] "Generic (PLEG): container finished" podID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerID="60d7ea4011489ca53dab64bf14b9a92a5723c976e54e1cf4f182bd991da897bb" exitCode=0 Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.712539 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerDied","Data":"60d7ea4011489ca53dab64bf14b9a92a5723c976e54e1cf4f182bd991da897bb"} Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.712843 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kdfj8" event={"ID":"de12191a-990d-4a30-8b00-15759e6ea1a5","Type":"ContainerDied","Data":"e6cc25e13c09f992886167a60ed45a4a3609d5b4ac40942d1d8cd66f5b0e9016"} Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.712871 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6cc25e13c09f992886167a60ed45a4a3609d5b4ac40942d1d8cd66f5b0e9016" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.756020 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.857296 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tddj2\" (UniqueName: \"kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2\") pod \"de12191a-990d-4a30-8b00-15759e6ea1a5\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.857533 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content\") pod \"de12191a-990d-4a30-8b00-15759e6ea1a5\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.857567 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities\") pod \"de12191a-990d-4a30-8b00-15759e6ea1a5\" (UID: \"de12191a-990d-4a30-8b00-15759e6ea1a5\") " Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.858695 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities" (OuterVolumeSpecName: "utilities") pod "de12191a-990d-4a30-8b00-15759e6ea1a5" (UID: "de12191a-990d-4a30-8b00-15759e6ea1a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.863690 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2" (OuterVolumeSpecName: "kube-api-access-tddj2") pod "de12191a-990d-4a30-8b00-15759e6ea1a5" (UID: "de12191a-990d-4a30-8b00-15759e6ea1a5"). InnerVolumeSpecName "kube-api-access-tddj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.915909 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de12191a-990d-4a30-8b00-15759e6ea1a5" (UID: "de12191a-990d-4a30-8b00-15759e6ea1a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.959878 4603 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.960280 4603 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de12191a-990d-4a30-8b00-15759e6ea1a5-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:46 crc kubenswrapper[4603]: I0930 21:14:46.960295 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tddj2\" (UniqueName: \"kubernetes.io/projected/de12191a-990d-4a30-8b00-15759e6ea1a5-kube-api-access-tddj2\") on node \"crc\" DevicePath \"\"" Sep 30 21:14:47 crc kubenswrapper[4603]: I0930 21:14:47.724357 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kdfj8" Sep 30 21:14:47 crc kubenswrapper[4603]: I0930 21:14:47.781318 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:47 crc kubenswrapper[4603]: I0930 21:14:47.792983 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kdfj8"] Sep 30 21:14:48 crc kubenswrapper[4603]: I0930 21:14:48.786480 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" path="/var/lib/kubelet/pods/de12191a-990d-4a30-8b00-15759e6ea1a5/volumes" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.171584 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq"] Sep 30 21:15:00 crc kubenswrapper[4603]: E0930 21:15:00.173275 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="registry-server" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.173308 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="registry-server" Sep 30 21:15:00 crc kubenswrapper[4603]: E0930 21:15:00.173396 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="extract-content" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.173414 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="extract-content" Sep 30 21:15:00 crc kubenswrapper[4603]: E0930 21:15:00.173495 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="extract-utilities" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.173516 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="extract-utilities" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.174007 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="de12191a-990d-4a30-8b00-15759e6ea1a5" containerName="registry-server" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.175287 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.177829 4603 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.179027 4603 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.180232 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq"] Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.238470 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.238590 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.238657 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pnk7\" (UniqueName: \"kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.341677 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.341753 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pnk7\" (UniqueName: \"kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.341906 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.342945 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume\") pod 
\"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.350973 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.368041 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pnk7\" (UniqueName: \"kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7\") pod \"collect-profiles-29321115-lbplq\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:00 crc kubenswrapper[4603]: I0930 21:15:00.510105 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:01 crc kubenswrapper[4603]: I0930 21:15:01.012866 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq"] Sep 30 21:15:01 crc kubenswrapper[4603]: I0930 21:15:01.889365 4603 generic.go:334] "Generic (PLEG): container finished" podID="1bd50d38-6d61-481a-898a-89a1cd5ccce1" containerID="0c20a3034d302a73882fc6d9650baf72efeae2117c26c8ab21719404ff4ed7e9" exitCode=0 Sep 30 21:15:01 crc kubenswrapper[4603]: I0930 21:15:01.889470 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" event={"ID":"1bd50d38-6d61-481a-898a-89a1cd5ccce1","Type":"ContainerDied","Data":"0c20a3034d302a73882fc6d9650baf72efeae2117c26c8ab21719404ff4ed7e9"} Sep 30 21:15:01 crc kubenswrapper[4603]: I0930 21:15:01.889752 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" event={"ID":"1bd50d38-6d61-481a-898a-89a1cd5ccce1","Type":"ContainerStarted","Data":"2c1ed8c9a6738bb4fe73486d2c30c4c83b5c3e5caecd58bec06138ab31008d5c"} Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.320910 4603 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.394627 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume\") pod \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.395053 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pnk7\" (UniqueName: \"kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7\") pod \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.395097 4603 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume\") pod \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\" (UID: \"1bd50d38-6d61-481a-898a-89a1cd5ccce1\") " Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.396237 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume" (OuterVolumeSpecName: "config-volume") pod "1bd50d38-6d61-481a-898a-89a1cd5ccce1" (UID: "1bd50d38-6d61-481a-898a-89a1cd5ccce1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.401414 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1bd50d38-6d61-481a-898a-89a1cd5ccce1" (UID: "1bd50d38-6d61-481a-898a-89a1cd5ccce1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.402428 4603 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7" (OuterVolumeSpecName: "kube-api-access-2pnk7") pod "1bd50d38-6d61-481a-898a-89a1cd5ccce1" (UID: "1bd50d38-6d61-481a-898a-89a1cd5ccce1"). InnerVolumeSpecName "kube-api-access-2pnk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.497457 4603 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pnk7\" (UniqueName: \"kubernetes.io/projected/1bd50d38-6d61-481a-898a-89a1cd5ccce1-kube-api-access-2pnk7\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.497486 4603 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1bd50d38-6d61-481a-898a-89a1cd5ccce1-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.497496 4603 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1bd50d38-6d61-481a-898a-89a1cd5ccce1-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.912157 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" event={"ID":"1bd50d38-6d61-481a-898a-89a1cd5ccce1","Type":"ContainerDied","Data":"2c1ed8c9a6738bb4fe73486d2c30c4c83b5c3e5caecd58bec06138ab31008d5c"} Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.912226 4603 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321115-lbplq" Sep 30 21:15:03 crc kubenswrapper[4603]: I0930 21:15:03.912245 4603 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c1ed8c9a6738bb4fe73486d2c30c4c83b5c3e5caecd58bec06138ab31008d5c" Sep 30 21:15:04 crc kubenswrapper[4603]: I0930 21:15:04.401290 4603 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5"] Sep 30 21:15:04 crc kubenswrapper[4603]: I0930 21:15:04.410810 4603 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-dlnb5"] Sep 30 21:15:04 crc kubenswrapper[4603]: I0930 21:15:04.788424 4603 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7efceb0-c8e0-4158-a53f-f6ff648341c3" path="/var/lib/kubelet/pods/e7efceb0-c8e0-4158-a53f-f6ff648341c3/volumes" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.732920 4603 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cvmkn"] Sep 30 21:15:11 crc kubenswrapper[4603]: E0930 21:15:11.734058 4603 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd50d38-6d61-481a-898a-89a1cd5ccce1" containerName="collect-profiles" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.734076 4603 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd50d38-6d61-481a-898a-89a1cd5ccce1" containerName="collect-profiles" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.734409 4603 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bd50d38-6d61-481a-898a-89a1cd5ccce1" containerName="collect-profiles" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.736218 4603 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.757274 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvmkn"] Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.908148 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7bpf\" (UniqueName: \"kubernetes.io/projected/9cf152b2-3a0a-4c42-b797-724b455ea7da-kube-api-access-c7bpf\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.908437 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-utilities\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:11 crc kubenswrapper[4603]: I0930 21:15:11.908503 4603 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-catalog-content\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.010945 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7bpf\" (UniqueName: \"kubernetes.io/projected/9cf152b2-3a0a-4c42-b797-724b455ea7da-kube-api-access-c7bpf\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.011703 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-utilities\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.011760 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-utilities\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.011855 4603 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-catalog-content\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.012183 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cf152b2-3a0a-4c42-b797-724b455ea7da-catalog-content\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.036460 4603 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c7bpf\" (UniqueName: \"kubernetes.io/projected/9cf152b2-3a0a-4c42-b797-724b455ea7da-kube-api-access-c7bpf\") pod \"redhat-operators-cvmkn\" (UID: \"9cf152b2-3a0a-4c42-b797-724b455ea7da\") " pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.061096 4603 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvmkn" Sep 30 21:15:12 crc kubenswrapper[4603]: I0930 21:15:12.523729 4603 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvmkn"] Sep 30 21:15:13 crc kubenswrapper[4603]: I0930 21:15:13.013561 4603 generic.go:334] "Generic (PLEG): container finished" podID="9cf152b2-3a0a-4c42-b797-724b455ea7da" containerID="6006d0dfe83a0a4d078e499fcb66632d26cd1cc1976b546d7e097aceaeaee2de" exitCode=0 Sep 30 21:15:13 crc kubenswrapper[4603]: I0930 21:15:13.013678 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvmkn" event={"ID":"9cf152b2-3a0a-4c42-b797-724b455ea7da","Type":"ContainerDied","Data":"6006d0dfe83a0a4d078e499fcb66632d26cd1cc1976b546d7e097aceaeaee2de"} Sep 30 21:15:13 crc kubenswrapper[4603]: I0930 21:15:13.014988 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvmkn" event={"ID":"9cf152b2-3a0a-4c42-b797-724b455ea7da","Type":"ContainerStarted","Data":"5b692532e6b124d5fa2ad15195226457e61c9544d86fb0661a06dccc94617b8d"} Sep 30 21:15:15 crc kubenswrapper[4603]: I0930 21:15:15.030430 4603 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvmkn" event={"ID":"9cf152b2-3a0a-4c42-b797-724b455ea7da","Type":"ContainerStarted","Data":"eddda33899e609868bad5d274bbb3ddd191ddaa7891e9042797c4cdbd3b51de2"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067044152024451 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067044152017366 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067031206016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067031206015455 5ustar corecore